From 7272ccf0d21ba3592b293fcb30523cb4842730a3 Mon Sep 17 00:00:00 2001 From: David Sinclair Date: Thu, 5 Oct 2023 14:44:52 -0500 Subject: [PATCH 01/69] #1247 (Mac Catalyst edition) --- clients/ios/Classes/ActivityModule.m | 8 +- clients/ios/Classes/AddSiteViewController.m | 6 +- .../Classes/AuthorizeServicesViewController.m | 5 +- clients/ios/Classes/BaseViewController.h | 9 ++- clients/ios/Classes/BaseViewController.m | 30 +++++++ .../ios/Classes/DetailViewController.swift | 18 ++--- clients/ios/Classes/FeedChooserTitleView.m | 2 +- clients/ios/Classes/FeedDetailGridView.swift | 2 + .../Classes/FeedDetailObjCViewController.h | 2 + .../Classes/FeedDetailObjCViewController.m | 16 +++- clients/ios/Classes/FeedTableCell.m | 13 ++- clients/ios/Classes/FeedsObjCViewController.h | 2 + clients/ios/Classes/FeedsObjCViewController.m | 66 +++++++++++++--- .../FirstTimeUserAddFriendsViewController.m | 2 +- .../FirstTimeUserAddNewsBlurViewController.m | 2 +- .../FirstTimeUserAddSitesViewController.m | 2 +- .../ios/Classes/FirstTimeUserViewController.m | 2 +- clients/ios/Classes/FolderTitleView.m | 18 ++--- .../ios/Classes/FriendsListViewController.m | 6 +- clients/ios/Classes/InteractionsModule.m | 8 +- clients/ios/Classes/LoginViewController.m | 12 +-- clients/ios/Classes/MoveSiteViewController.m | 2 +- clients/ios/Classes/NewsBlurAppDelegate.h | 2 - clients/ios/Classes/NewsBlurAppDelegate.m | 79 +++++++++---------- .../ios/Classes/NotificationsViewController.m | 2 +- .../ios/Classes/OriginalStoryViewController.m | 10 +-- clients/ios/Classes/ShareViewController.m | 2 +- clients/ios/Classes/SmallActivityCell.m | 3 +- clients/ios/Classes/SmallInteractionCell.m | 5 +- clients/ios/Classes/Story.swift | 4 +- .../Classes/StoryDetailObjCViewController.m | 20 +++-- .../Classes/StoryPagesObjCViewController.m | 8 +- clients/ios/Classes/TrainerViewController.m | 2 +- clients/ios/Classes/UnreadCountView.m | 4 +- .../ios/NewsBlur.xcodeproj/project.pbxproj | 6 +- .../AFNetworking/AFURLRequestSerialization.m | 4 +- .../AFNetworking/AFURLSessionManager.m | 6 +- .../UIRefreshControl+AFNetworking.h | 2 +- .../UIRefreshControl+AFNetworking.m | 2 +- .../IASKAppSettingsViewController.m | 2 +- .../Models/IASKSettingsReader.m | 2 +- .../OnePasswordExtension.m | 2 +- .../ios/Resources/MainInterface.storyboard | 28 ++++--- 43 files changed, 270 insertions(+), 158 deletions(-) diff --git a/clients/ios/Classes/ActivityModule.m b/clients/ios/Classes/ActivityModule.m index 59637f53f9..5656e03de0 100644 --- a/clients/ios/Classes/ActivityModule.m +++ b/clients/ios/Classes/ActivityModule.m @@ -154,7 +154,7 @@ - (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger - (CGFloat)tableView:(UITableView *)tableView heightForRowAtIndexPath:(NSIndexPath *)indexPath { NSInteger activitiesCount = [appDelegate.userActivitiesArray count]; int minimumHeight; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!appDelegate.isPhone) { minimumHeight = MINIMUM_ACTIVITY_HEIGHT_IPAD; } else { minimumHeight = MINIMUM_ACTIVITY_HEIGHT_IPHONE; @@ -165,7 +165,7 @@ - (CGFloat)tableView:(UITableView *)tableView heightForRowAtIndexPath:(NSIndexPa } id activityCell; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!appDelegate.isPhone) { activityCell = [[ActivityCell alloc] init]; } else { activityCell = [[SmallActivityCell alloc] init]; @@ -185,7 +185,7 @@ - (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(N ActivityCell *cell = 
[tableView dequeueReusableCellWithIdentifier:@"ActivityCell"]; if (cell == nil) { - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!appDelegate.isPhone) { cell = [[ActivityCell alloc] initWithStyle:UITableViewCellStyleDefault reuseIdentifier:@"ActivityCell"]; @@ -304,7 +304,7 @@ - (UITableViewCell *)makeLoadingCell { UIImage *img = [UIImage imageNamed:@"fleuron.png"]; UIImageView *fleuron = [[UIImageView alloc] initWithImage:img]; int height; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!appDelegate.isPhone) { height = MINIMUM_ACTIVITY_HEIGHT_IPAD; } else { height = MINIMUM_ACTIVITY_HEIGHT_IPHONE; diff --git a/clients/ios/Classes/AddSiteViewController.m b/clients/ios/Classes/AddSiteViewController.m index b8517af6e8..deb96de737 100644 --- a/clients/ios/Classes/AddSiteViewController.m +++ b/clients/ios/Classes/AddSiteViewController.m @@ -93,7 +93,7 @@ - (void)viewWillAppear:(BOOL)animated { //- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation { // // Return YES for supported orientations -// if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { +// if (!self.isPhone) { // return YES; // } else if (UIInterfaceOrientationIsPortrait(interfaceOrientation)) { // return YES; @@ -130,7 +130,7 @@ - (CGSize)preferredContentSize { } - (IBAction)doCancelButton { - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { [appDelegate hidePopover]; } else { [appDelegate hidePopoverAnimated:YES]; @@ -272,7 +272,7 @@ - (IBAction)addSite { [self.errorLabel setText:[responseObject valueForKey:@"message"]]; [self.errorLabel setHidden:NO]; } else { - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { [self->appDelegate hidePopover]; } else { [self->appDelegate hidePopoverAnimated:YES]; diff --git a/clients/ios/Classes/AuthorizeServicesViewController.m b/clients/ios/Classes/AuthorizeServicesViewController.m index 2d3e2b9db4..70d0f418ef 100644 --- a/clients/ios/Classes/AuthorizeServicesViewController.m +++ b/clients/ios/Classes/AuthorizeServicesViewController.m @@ -53,7 +53,7 @@ - (void)viewWillAppear:(BOOL)animated { [self.webView loadRequest:requestObj]; }]; - if (self.fromStory && [[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (self.fromStory && !appDelegate.isPhone) { UIBarButtonItem *cancelButton = [[UIBarButtonItem alloc] initWithTitle: @"Cancel" style: UIBarButtonItemStylePlain @@ -75,6 +75,7 @@ - (void)doCancelButton { } - (void)webView:(WKWebView *)webView decidePolicyForNavigationAction:(WKNavigationAction *)navigationAction decisionHandler:(void (^)(WKNavigationActionPolicy))decisionHandler { + BOOL isPhone = appDelegate.isPhone; NSURLRequest *request = navigationAction.request; NSString *URLString = [[request URL] absoluteString]; NSLog(@"URL STRING IS %@", URLString); @@ -86,7 +87,7 @@ - (void)webView:(WKWebView *)webView decidePolicyForNavigationAction:(WKNavigati if (self.fromStory) { [self.appDelegate refreshUserProfile:^{ - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!isPhone) { [self.appDelegate.shareNavigationController viewWillAppear:YES]; [self.appDelegate.modalNavigationController dismissViewControllerAnimated:YES completion:nil]; } else { diff --git a/clients/ios/Classes/BaseViewController.h b/clients/ios/Classes/BaseViewController.h index 7666afb74a..960576a102 100644 --- 
a/clients/ios/Classes/BaseViewController.h
+++ b/clients/ios/Classes/BaseViewController.h
@@ -1,8 +1,13 @@
 #import <UIKit/UIKit.h>
 #import "MBProgressHUD.h"
 
-@interface BaseViewController : UIViewController {
-}
+@interface BaseViewController : UIViewController
+
+@property (nonatomic, readonly) BOOL isPhone;
+@property (nonatomic, readonly) BOOL isMac;
+@property (nonatomic, readonly) BOOL isVision;
+@property (nonatomic, readonly) BOOL isPortrait;
+@property (nonatomic, readonly) BOOL isCompactWidth;
 
 - (void)informError:(id)error;
 - (void)informError:(id)error statusCode:(NSInteger)statusCode;
diff --git a/clients/ios/Classes/BaseViewController.m b/clients/ios/Classes/BaseViewController.m
index f026a48210..999aba944a 100644
--- a/clients/ios/Classes/BaseViewController.m
+++ b/clients/ios/Classes/BaseViewController.m
@@ -177,4 +177,34 @@ - (UIStatusBarStyle)preferredStatusBarStyle {
     return UIStatusBarStyleLightContent;
 }
 
+- (BOOL)isPhone {
+    return [[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone;
+}
+
+- (BOOL)isMac {
+    return [[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomMac;
+}
+
+- (BOOL)isVision {
+    if (@available(iOS 17.0, *)) {
+        return [[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomVision;
+    } else {
+        return NO;
+    }
+}
+
+- (BOOL)isPortrait {
+    UIInterfaceOrientation orientation = self.view.window.windowScene.interfaceOrientation;
+    
+    if (orientation == UIInterfaceOrientationPortrait || orientation == UIInterfaceOrientationPortraitUpsideDown) {
+        return YES;
+    } else {
+        return NO;
+    }
+}
+
+- (BOOL)isCompactWidth {
+    return self.view.window.windowScene.traitCollection.horizontalSizeClass == UIUserInterfaceSizeClassCompact;
+    //return self.compactWidth > 0.0;
+}
+
 @end
diff --git a/clients/ios/Classes/DetailViewController.swift b/clients/ios/Classes/DetailViewController.swift
index 8a92af4a5a..78c20f6ede 100644
--- a/clients/ios/Classes/DetailViewController.swift
+++ b/clients/ios/Classes/DetailViewController.swift
@@ -185,20 +185,10 @@ class DetailViewController: BaseViewController {
         }
     }
     
-    /// Returns `true` if the device is an iPhone, otherwise `false`.
-    @objc var isPhone: Bool {
-        return UIDevice.current.userInterfaceIdiom == .phone
-    }
-    
-    /// Returns `true` if the window is in portrait orientation, otherwise `false`.
-    @objc var isPortraitOrientation: Bool {
-        return view.window?.windowScene?.interfaceOrientation.isPortrait ?? false
-    }
-    
     /// Position of the divider between the views.
     var dividerPosition: CGFloat {
         get {
-            let key = isPortraitOrientation ? Key.verticalPosition : Key.horizontalPosition
+            let key = isPortrait ? Key.verticalPosition : Key.horizontalPosition
             let value = CGFloat(UserDefaults.standard.float(forKey: key))
             
             if value == 0 {
@@ -212,7 +202,7 @@ class DetailViewController: BaseViewController {
                 return
             }
             
-            let key = isPortraitOrientation ? Key.verticalPosition : Key.horizontalPosition
+            let key = isPortrait ? 
Key.verticalPosition : Key.horizontalPosition UserDefaults.standard.set(Float(newValue), forKey: key) } @@ -453,6 +443,10 @@ private extension DetailViewController { func checkViewControllers() { let isTop = layout == .top +#if targetEnvironment(macCatalyst) +// splitViewController?.primaryBackgroundStyle = .sidebar //TODO: work in progress +#endif + if layout != .grid || isPhone { storyPagesViewController = listStoryPagesViewController _ = storyPagesViewController?.view diff --git a/clients/ios/Classes/FeedChooserTitleView.m b/clients/ios/Classes/FeedChooserTitleView.m index 106548bbb8..8f090aab5a 100644 --- a/clients/ios/Classes/FeedChooserTitleView.m +++ b/clients/ios/Classes/FeedChooserTitleView.m @@ -79,7 +79,7 @@ - (void)drawRect:(CGRect)rect { UIImage *folderImage = [UIImage imageNamed:@"folder-open"]; CGFloat folderImageViewX = 10.0; - if ([[UIDevice currentDevice] userInterfaceIdiom] != UIUserInterfaceIdiomPad) { + if (((NewsBlurAppDelegate *)[[UIApplication sharedApplication] delegate]).isPhone) { folderImageViewX = 7.0; } diff --git a/clients/ios/Classes/FeedDetailGridView.swift b/clients/ios/Classes/FeedDetailGridView.swift index 8b802e1cae..a7020b6a6e 100644 --- a/clients/ios/Classes/FeedDetailGridView.swift +++ b/clients/ios/Classes/FeedDetailGridView.swift @@ -136,6 +136,7 @@ struct FeedDetailGridView: View { } } .modify({ view in +#if !targetEnvironment(macCatalyst) if #available(iOS 15.0, *) { view.refreshable { if cache.canPullToRefresh { @@ -143,6 +144,7 @@ struct FeedDetailGridView: View { } } } +#endif }) } .background(Color.themed([0xE0E0E0, 0xFFF8CA, 0x363636, 0x101010])) diff --git a/clients/ios/Classes/FeedDetailObjCViewController.h b/clients/ios/Classes/FeedDetailObjCViewController.h index 8434d29949..05751935ff 100644 --- a/clients/ios/Classes/FeedDetailObjCViewController.h +++ b/clients/ios/Classes/FeedDetailObjCViewController.h @@ -50,7 +50,9 @@ @property (nonatomic) IBOutlet UIBarButtonItem * titleImageBarButton; @property (nonatomic, retain) NBNotifier *notifier; @property (nonatomic, retain) StoriesCollection *storiesCollection; +#if !TARGET_OS_MACCATALYST @property (nonatomic) UIRefreshControl *refreshControl; +#endif @property (nonatomic) UISearchBar *searchBar; @property (nonatomic) IBOutlet UIView *messageView; @property (nonatomic) IBOutlet UILabel *messageLabel; diff --git a/clients/ios/Classes/FeedDetailObjCViewController.m b/clients/ios/Classes/FeedDetailObjCViewController.m index 49dc5dc9fe..76f278fabd 100644 --- a/clients/ios/Classes/FeedDetailObjCViewController.m +++ b/clients/ios/Classes/FeedDetailObjCViewController.m @@ -106,7 +106,7 @@ - (void)viewDidLoad { if (@available(iOS 15.0, *)) { self.storyTitlesTable.allowsFocus = NO; } - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { self.storyTitlesTable.dragDelegate = self; self.storyTitlesTable.dragInteractionEnabled = YES; } @@ -119,10 +119,12 @@ - (void)viewDidLoad { initWithBarButtonSystemItem:UIBarButtonSystemItemFixedSpace target:nil action:nil]; spacer2BarButton.width = 0; +#if !TARGET_OS_MACCATALYST self.refreshControl = [UIRefreshControl new]; self.refreshControl.tintColor = UIColorFromLightDarkRGB(0x0, 0xffffff); self.refreshControl.backgroundColor = UIColorFromRGB(0xE3E6E0); [self.refreshControl addTarget:self action:@selector(refresh:) forControlEvents:UIControlEventValueChanged]; +#endif self.searchBar = [[UISearchBar alloc] initWithFrame:CGRectMake(0, 0, CGRectGetWidth(self.storyTitlesTable.frame), 44.)]; @@ -434,7 +436,7 @@ - 
(void)viewWillAppear:(BOOL)animated { if (storiesCollection == nil) { NSString *appOpening = [userPreferences stringForKey:@"app_opening"]; - if ([appOpening isEqualToString:@"feeds"] && [[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if ([appOpening isEqualToString:@"feeds"] && !self.isPhone) { self.messageLabel.text = @"Select a feed to read"; self.messageView.hidden = NO; } @@ -510,11 +512,13 @@ - (void)viewWillAppear:(BOOL)animated { [self.searchBar setShowsCancelButton:NO animated:YES]; } +#if !TARGET_OS_MACCATALYST if (self.canPullToRefresh) { self.storyTitlesTable.refreshControl = self.refreshControl; } else { self.storyTitlesTable.refreshControl = nil; } +#endif [self updateTheme]; @@ -1440,7 +1444,7 @@ - (void)testForTryFeed { NSUserDefaults *preferences = [NSUserDefaults standardUserDefaults]; NSString *feedOpening = [preferences stringForKey:@"feed_opening"]; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad && feedOpening == nil) { + if (!self.isPhone && feedOpening == nil) { feedOpening = @"story"; } @@ -3068,8 +3072,10 @@ - (void)updateTheme { self.navigationItem.titleView = [appDelegate makeFeedTitle:storiesCollection.activeFeed]; } +#if !TARGET_OS_MACCATALYST self.refreshControl.tintColor = UIColorFromLightDarkRGB(0x0, 0xffffff); self.refreshControl.backgroundColor = UIColorFromRGB(0xE3E6E0); +#endif self.searchBar.backgroundColor = UIColorFromRGB(0xE3E6E0); self.searchBar.tintColor = UIColorFromRGB(0xffffff); @@ -3165,6 +3171,7 @@ - (BOOL)canPullToRefresh { return appDelegate.storiesCollection.activeFeed != nil && !river && !infrequent && !saved && !read && !widget; } +#if !TARGET_OS_MACCATALYST - (void)refresh:(UIRefreshControl *)refreshControl { if (self.canPullToRefresh) { self.inPullToRefresh_ = YES; @@ -3173,10 +3180,13 @@ - (void)refresh:(UIRefreshControl *)refreshControl { [self finishRefresh]; } } +#endif - (void)finishRefresh { self.inPullToRefresh_ = NO; +#if !TARGET_OS_MACCATALYST [self.refreshControl endRefreshing]; +#endif } #pragma mark - diff --git a/clients/ios/Classes/FeedTableCell.m b/clients/ios/Classes/FeedTableCell.m index 88c0ab7a63..526af3f3ed 100644 --- a/clients/ios/Classes/FeedTableCell.m +++ b/clients/ios/Classes/FeedTableCell.m @@ -174,11 +174,18 @@ - (void)drawRect:(CGRect)r { BOOL isHighlighted = cell.highlighted || cell.selected; UIColor *backgroundColor; +#if TARGET_OS_MACCATALYST + backgroundColor = cell.isSocial ? UIColorFromRGB(0xD8E3DB) : + cell.isSearch ? UIColorFromRGB(0xDBDFE6) : + cell.isSaved ? UIColorFromRGB(0xDFDCD6) : + UIColor.clearColor; +#else backgroundColor = cell.isSocial ? UIColorFromRGB(0xD8E3DB) : cell.isSearch ? UIColorFromRGB(0xDBDFE6) : cell.isSaved ? 
UIColorFromRGB(0xDFDCD6) : UIColorFromRGB(0xF7F8F5); - +#endif + // [backgroundColor set]; self.backgroundColor = backgroundColor; cell.backgroundColor = backgroundColor; @@ -219,7 +226,7 @@ - (void)drawRect:(CGRect)r { paragraphStyle.alignment = NSTextAlignmentLeft; CGSize faviconSize; if (cell.isSocial) { - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!cell.appDelegate.isPhone) { faviconSize = CGSizeMake(28, 28); UIImage *feedIcon = [Utilities roundCorneredImage:cell.feedFavicon radius:4 convertToSize:faviconSize]; [feedIcon drawInRect:CGRectMake(9.0, CGRectGetMidY(r)-faviconSize.height/2, faviconSize.width, faviconSize.height)]; @@ -239,7 +246,7 @@ - (void)drawRect:(CGRect)r { } else { faviconSize = CGSizeMake(16, 16); UIImage *feedIcon = [Utilities roundCorneredImage:cell.feedFavicon radius:4 convertToSize:faviconSize]; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!cell.appDelegate.isPhone) { [feedIcon drawInRect:CGRectMake(12.0, CGRectGetMidY(r)-faviconSize.height/2, faviconSize.width, faviconSize.height)]; [cell.feedTitle drawInRect:CGRectMake(36.0, titleOffsetY, r.size.width - ([cell.unreadCount offsetWidth] + 36) - 10, font.pointSize*1.4) withAttributes:@{NSFontAttributeName: font, diff --git a/clients/ios/Classes/FeedsObjCViewController.h b/clients/ios/Classes/FeedsObjCViewController.h index 7dda6f8b7e..bcac146ba8 100644 --- a/clients/ios/Classes/FeedsObjCViewController.h +++ b/clients/ios/Classes/FeedsObjCViewController.h @@ -74,7 +74,9 @@ UIGestureRecognizerDelegate, UISearchBarDelegate> { @property (nonatomic, readwrite) BOOL viewShowingAllFeeds; @property (nonatomic, readwrite) BOOL interactiveFeedDetailTransition; @property (nonatomic, readwrite) BOOL isOffline; +#if !TARGET_OS_MACCATALYST @property (nonatomic) UIRefreshControl *refreshControl; +#endif @property (nonatomic) UISearchBar *searchBar; @property (nonatomic, strong) NSArray *searchFeedIds; @property (nonatomic) NSCache *imageCache; diff --git a/clients/ios/Classes/FeedsObjCViewController.m b/clients/ios/Classes/FeedsObjCViewController.m index 68e59a5e2a..a5120fa5bd 100644 --- a/clients/ios/Classes/FeedsObjCViewController.m +++ b/clients/ios/Classes/FeedsObjCViewController.m @@ -113,12 +113,14 @@ - (void)viewDidLoad { self.rowHeights = [NSMutableDictionary dictionary]; self.folderTitleViews = [NSMutableDictionary dictionary]; +#if !TARGET_OS_MACCATALYST self.refreshControl = [UIRefreshControl new]; self.refreshControl.tintColor = UIColorFromLightDarkRGB(0x0, 0xffffff); self.refreshControl.backgroundColor = UIColorFromRGB(0xE3E6E0); [self.refreshControl addTarget:self action:@selector(refresh:) forControlEvents:UIControlEventValueChanged]; self.feedTitlesTable.refreshControl = self.refreshControl; self.feedViewToolbar.translatesAutoresizingMaskIntoConstraints = NO; +#endif self.searchBar = [[UISearchBar alloc] initWithFrame:CGRectMake(0, 0, CGRectGetWidth(self.feedTitlesTable.frame), 44.)]; @@ -169,7 +171,13 @@ - (void)viewDidLoad { [[UIBarButtonItem appearance] setTitleTextAttributes:@{NSForegroundColorAttributeName: UIColorFromFixedRGB(0x4C4D4A)} forState:UIControlStateHighlighted]; +#if TARGET_OS_MACCATALYST +// self.view.superview.backgroundColor = UIColor.clearColor; +// self.view.backgroundColor = UIColor.clearColor; + self.view.backgroundColor = UIColorFromRGB(0xf4f4f4); //TODO: work in progress +#else self.view.backgroundColor = UIColorFromRGB(0xf4f4f4); +#endif self.navigationController.navigationBar.tintColor = 
UIColorFromRGB(0x8F918B);
     self.navigationController.navigationBar.translucent = NO;
     UIInterfaceOrientation orientation = self.view.window.windowScene.interfaceOrientation;
@@ -194,7 +202,12 @@ - (void)viewDidLoad {
     self.notifier.topOffsetConstraint = [NSLayoutConstraint constraintWithItem:self.notifier attribute:NSLayoutAttributeTop relatedBy:NSLayoutRelationEqual toItem:self.feedViewToolbar attribute:NSLayoutAttributeTop multiplier:1.0 constant:0];
     [self.view addConstraint:self.notifier.topOffsetConstraint];
     
+#if TARGET_OS_MACCATALYST
+//    self.feedTitlesTable.backgroundColor = UIColor.clearColor;
+    self.feedTitlesTable.backgroundColor = UIColorFromRGB(0xf4f4f4); //TODO: work in progress
+#else
     self.feedTitlesTable.backgroundColor = UIColorFromRGB(0xf4f4f4);
+#endif
     self.feedTitlesTable.separatorColor = [UIColor clearColor];
     self.feedTitlesTable.translatesAutoresizingMaskIntoConstraints = NO;
     self.feedTitlesTable.estimatedRowHeight = 0;
@@ -228,7 +241,7 @@ - (void)viewWillAppear:(BOOL)animated {
     
     [self resetRowHeights];
     
-//    if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad &&
+//    if (!self.isPhone &&
 //        !self.interactiveFeedDetailTransition) {
 //
 //        [appDelegate.masterContainerViewController transitionFromFeedDetail];
 //    NSLog(@"Feed List timing 0: %f", [NSDate timeIntervalSinceReferenceDate] - start);
     [super viewWillAppear:animated];
     
+#if TARGET_OS_MACCATALYST
+    UINavigationController *navController = self.navigationController;
+    UITitlebar *titlebar = navController.navigationBar.window.windowScene.titlebar;
+    
+    titlebar.titleVisibility = UITitlebarTitleVisibilityHidden;
+#endif
+    
     NSUserDefaults *userPreferences = [NSUserDefaults standardUserDefaults];
     NSInteger intelligenceLevel = [userPreferences integerForKey:@"selectedIntelligence"];
@@ -421,7 +441,7 @@ - (void)viewWillTransitionToSize:(CGSize)size withTransitionCoordinator:(id<UIViewControllerTransitionCoordinator>)coordinator {
 
+- (void)buildMenuWithBuilder:(id<UIMenuBuilder>)builder {
+    
+}
+
 #pragma mark -
 #pragma mark State Restoration
@@ -897,7 +921,7 @@ - (void)finishLoadingFeedListWithDict:(NSDictionary *)results finished:(BOOL)fin
     [self refreshHeaderCounts];
     [appDelegate checkForFeedNotifications];
     
-    if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad && finished) {
+    if (!self.isPhone && finished) {
         [self cacheFeedRowLocations];
     }
@@ -1052,9 +1076,11 @@ - (IBAction)showSettingsPopover:(id)sender {
     MenuViewController *viewController = [MenuViewController new];
     
-    [viewController addTitle:@"Preferences" iconName:@"dialog-preferences" iconColor:UIColorFromRGB(0xDF8566) selectionShouldDismiss:YES handler:^{
-        [self.appDelegate showPreferences];
-    }];
+    if (!self.isMac) {
+        [viewController addTitle:@"Preferences" iconName:@"dialog-preferences" iconColor:UIColorFromRGB(0xDF8566) selectionShouldDismiss:YES handler:^{
+            [self.appDelegate showPreferences];
+        }];
+    }
     
     [viewController addTitle:@"Mute Sites" iconName:@"menu_icn_mute.png" selectionShouldDismiss:YES handler:^{
         [self.appDelegate showMuteSites];
@@ -1295,9 +1321,15 @@ - (void)updateTheme {
     self.feedViewToolbar.barTintColor = [UINavigationBar appearance].barTintColor;
     self.addBarButton.tintColor = UIColorFromRGB(0x8F918B);
     self.settingsBarButton.tintColor = UIColorFromRGB(0x8F918B);
+#if TARGET_OS_MACCATALYST
+//    self.view.superview.backgroundColor = UIColor.clearColor;
+//    self.view.backgroundColor = UIColor.clearColor;
+    self.view.backgroundColor = UIColorFromRGB(0xf4f4f4); //TODO: work in progress
+#else
     self.refreshControl.tintColor = UIColorFromLightDarkRGB(0x0, 
0xffffff); self.refreshControl.backgroundColor = UIColorFromRGB(0xE3E6E0); self.view.backgroundColor = UIColorFromRGB(0xf4f4f4); +#endif [[ThemeManager themeManager] updateSegmentedControl:self.intelligenceControl]; @@ -1322,7 +1354,13 @@ - (void)updateTheme { self.searchBar.keyboardAppearance = UIKeyboardAppearanceDefault; } +#if TARGET_OS_MACCATALYST +// self.feedTitlesTable.backgroundColor = UIColor.clearColor; + self.feedTitlesTable.backgroundColor = UIColorFromRGB(0xf4f4f4); //TODO: work in progress +#else self.feedTitlesTable.backgroundColor = UIColorFromRGB(0xf4f4f4); +#endif + [self reloadFeedTitlesTable]; [self resetupGestures]; @@ -1678,7 +1716,7 @@ - (CGFloat)tableView:(UITableView *)tableView - (CGFloat)calculateHeightForRowAtIndexPath:(NSIndexPath *)indexPath { if (appDelegate.hasNoSites) { - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { return kBlurblogTableViewRowHeight; } else { return kPhoneBlurblogTableViewRowHeight; @@ -1722,13 +1760,13 @@ - (CGFloat)calculateHeightForRowAtIndexPath:(NSIndexPath *)indexPath { if ([folderName isEqualToString:@"river_blurblogs"] || [folderName isEqualToString:@"river_global"]) { // blurblogs - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { height = kBlurblogTableViewRowHeight; } else { height = kPhoneBlurblogTableViewRowHeight; } } else { - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { height = kTableViewRowHeight; } else { height = kPhoneTableViewRowHeight; @@ -2448,7 +2486,7 @@ - (IBAction)selectIntelligence { [hud hide:YES afterDelay:0.5]; [self showExplainerOnEmptyFeedlist]; -// if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { +// if (!self.isPhone) { // FeedDetailViewController *storiesModule = self.appDelegate.dashboardViewController.storiesModule; // // storiesModule.storiesCollection.feedPage = 0; @@ -2677,15 +2715,19 @@ - (void)searchBar:(UISearchBar *)searchBar textDidChange:(NSString *)searchText #pragma mark - #pragma mark PullToRefresh +#if !TARGET_OS_MACCATALYST - (void)refresh:(UIRefreshControl *)refreshControl { self.inPullToRefresh_ = YES; [appDelegate reloadFeedsView:NO]; [appDelegate donateRefresh]; } +#endif - (void)finishRefresh { self.inPullToRefresh_ = NO; +#if !TARGET_OS_MACCATALYST [self.refreshControl endRefreshing]; +#endif } - (void)refreshFeedList { diff --git a/clients/ios/Classes/FirstTimeUserAddFriendsViewController.m b/clients/ios/Classes/FirstTimeUserAddFriendsViewController.m index 72124a4626..737299d4ce 100644 --- a/clients/ios/Classes/FirstTimeUserAddFriendsViewController.m +++ b/clients/ios/Classes/FirstTimeUserAddFriendsViewController.m @@ -53,7 +53,7 @@ - (void)viewWillAppear:(BOOL)animated { //- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation { // // Return YES for supported orientations -// if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { +// if (!self.isPhone) { // return YES; // } else if (UIInterfaceOrientationIsPortrait(interfaceOrientation)) { // return YES; diff --git a/clients/ios/Classes/FirstTimeUserAddNewsBlurViewController.m b/clients/ios/Classes/FirstTimeUserAddNewsBlurViewController.m index 308622701c..b2922cc699 100644 --- a/clients/ios/Classes/FirstTimeUserAddNewsBlurViewController.m +++ b/clients/ios/Classes/FirstTimeUserAddNewsBlurViewController.m @@ -51,7 +51,7 @@ - (void)viewDidAppear:(BOOL)animated { //- 
(BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation { // // Return YES for supported orientations -// if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { +// if (!self.isPhone) { // return YES; // } else if (UIInterfaceOrientationIsPortrait(interfaceOrientation)) { // return YES; diff --git a/clients/ios/Classes/FirstTimeUserAddSitesViewController.m b/clients/ios/Classes/FirstTimeUserAddSitesViewController.m index b37b658b43..99b06240ee 100644 --- a/clients/ios/Classes/FirstTimeUserAddSitesViewController.m +++ b/clients/ios/Classes/FirstTimeUserAddSitesViewController.m @@ -89,7 +89,7 @@ - (void)viewWillAppear:(BOOL)animated { //- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation { // // Return YES for supported orientations -// if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { +// if (!self.isPhone) { // return YES; // } else if (UIInterfaceOrientationIsPortrait(interfaceOrientation)) { // return YES; diff --git a/clients/ios/Classes/FirstTimeUserViewController.m b/clients/ios/Classes/FirstTimeUserViewController.m index f40444c8e5..6dbc872a0f 100644 --- a/clients/ios/Classes/FirstTimeUserViewController.m +++ b/clients/ios/Classes/FirstTimeUserViewController.m @@ -98,7 +98,7 @@ - (void)viewDidDisappear:(BOOL)animated { //- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation { // // Return YES for supported orientations -// if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { +// if (!self.isPhone) { // return YES; // } else if (UIInterfaceOrientationIsPortrait(interfaceOrientation)) { // return YES; diff --git a/clients/ios/Classes/FolderTitleView.m b/clients/ios/Classes/FolderTitleView.m index f20c40b3de..a33be9291d 100644 --- a/clients/ios/Classes/FolderTitleView.m +++ b/clients/ios/Classes/FolderTitleView.m @@ -213,7 +213,7 @@ - (void) drawRect:(CGRect)rect { if (section == NewsBlurTopSectionInfrequentSiteStories) { folderImage = [UIImage imageNamed:@"ak-icon-infrequent.png"]; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!appDelegate.isPhone) { folderImageViewX = 10; } else { folderImageViewX = 7; @@ -221,7 +221,7 @@ - (void) drawRect:(CGRect)rect { allowLongPress = YES; } else if (section == NewsBlurTopSectionAllStories) { folderImage = [UIImage imageNamed:@"all-stories"]; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!appDelegate.isPhone) { folderImageViewX = 10; } else { folderImageViewX = 7; @@ -229,42 +229,42 @@ - (void) drawRect:(CGRect)rect { allowLongPress = NO; } else if ([folderName isEqual:@"river_global"]) { folderImage = [UIImage imageNamed:@"global-shares"]; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!appDelegate.isPhone) { folderImageViewX = 10; } else { folderImageViewX = 8; } } else if ([folderName isEqual:@"river_blurblogs"]) { folderImage = [UIImage imageNamed:@"all-shares"]; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!appDelegate.isPhone) { folderImageViewX = 10; } else { folderImageViewX = 8; } } else if ([folderName isEqual:@"saved_searches"]) { folderImage = [UIImage imageNamed:@"search"]; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!appDelegate.isPhone) { folderImageViewX = 10; } else { folderImageViewX = 7; } } else if ([folderName 
isEqual:@"saved_stories"]) { folderImage = [UIImage imageNamed:@"saved-stories"]; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!appDelegate.isPhone) { folderImageViewX = 10; } else { folderImageViewX = 7; } } else if ([folderName isEqual:@"read_stories"]) { folderImage = [UIImage imageNamed:@"indicator-unread"]; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!appDelegate.isPhone) { folderImageViewX = 10; } else { folderImageViewX = 7; } } else if ([folderName isEqual:@"widget_stories"]) { folderImage = [UIImage imageNamed:@"g_icn_folder_widget.png"]; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!appDelegate.isPhone) { folderImageViewX = 10; } else { folderImageViewX = 7; @@ -275,7 +275,7 @@ - (void) drawRect:(CGRect)rect { } else { folderImage = [UIImage imageNamed:@"folder-open"]; } - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!appDelegate.isPhone) { } else { folderImageViewX = 7; } diff --git a/clients/ios/Classes/FriendsListViewController.m b/clients/ios/Classes/FriendsListViewController.m index b18f1c5ba7..641325bc06 100644 --- a/clients/ios/Classes/FriendsListViewController.m +++ b/clients/ios/Classes/FriendsListViewController.m @@ -156,7 +156,7 @@ - (CGFloat)tableView:(UITableView *)tableView heightForHeaderInSection:(NSIntege // if (self.inSearch_){ // return 0; // } else { -// if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad){ +// if (!self.isPhone){ // return 28; // }else{ // return 21; @@ -168,7 +168,7 @@ - (UIView *)tableView:(UITableView *)tableView viewForHeaderInSection:(NSInteger)section { int headerLabelHeight, folderImageViewY; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { headerLabelHeight = 28; folderImageViewY = 3; } else { @@ -280,7 +280,7 @@ - (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(N // add a NO FRIENDS TO SUGGEST message on either the first or second row depending on iphone/ipad int row = 0; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { row = 1; } diff --git a/clients/ios/Classes/InteractionsModule.m b/clients/ios/Classes/InteractionsModule.m index b279074db7..4f1ba0dca3 100644 --- a/clients/ios/Classes/InteractionsModule.m +++ b/clients/ios/Classes/InteractionsModule.m @@ -154,7 +154,7 @@ -(CGFloat)tableView:(UITableView *)tableView heightForHeaderInSection:(NSInteger - (CGFloat)tableView:(UITableView *)tableView heightForRowAtIndexPath:(NSIndexPath *)indexPath { NSInteger userInteractions = [appDelegate.userInteractionsArray count]; int minimumHeight; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!appDelegate.isPhone) { minimumHeight = MINIMUM_INTERACTION_HEIGHT_IPAD; } else { minimumHeight = MINIMUM_INTERACTION_HEIGHT_IPHONE; @@ -165,7 +165,7 @@ - (CGFloat)tableView:(UITableView *)tableView heightForRowAtIndexPath:(NSIndexPa } InteractionCell *interactionCell; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!appDelegate.isPhone) { interactionCell = [[InteractionCell alloc] init]; } else { interactionCell = [[SmallInteractionCell alloc] init]; @@ -190,7 +190,7 @@ - (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(N InteractionCell *cell = [tableView dequeueReusableCellWithIdentifier:@"InteractionCell"]; if 
(cell == nil) { - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!appDelegate.isPhone) { cell = [[InteractionCell alloc] initWithStyle:UITableViewCellStyleDefault reuseIdentifier:@"InteractionCell"]; } else { cell = [[SmallInteractionCell alloc] initWithStyle:UITableViewCellStyleDefault reuseIdentifier:@"InteractionCell"]; @@ -276,7 +276,7 @@ - (UITableViewCell *)makeLoadingCell { UIImageView *fleuron = [[UIImageView alloc] initWithImage:img]; int height; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!appDelegate.isPhone) { height = MINIMUM_INTERACTION_HEIGHT_IPAD; } else { height = MINIMUM_INTERACTION_HEIGHT_IPHONE; diff --git a/clients/ios/Classes/LoginViewController.m b/clients/ios/Classes/LoginViewController.m index a6cd9fe7ce..9c44685f9d 100644 --- a/clients/ios/Classes/LoginViewController.m +++ b/clients/ios/Classes/LoginViewController.m @@ -71,7 +71,7 @@ - (CGFloat)xForWidth:(CGFloat)width { } - (void)rearrangeViews { - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { CGSize viewSize = self.view.bounds.size; CGFloat viewWidth = viewSize.width; CGFloat yOffset = 0; @@ -98,7 +98,7 @@ - (void)viewWillAppear:(BOOL)animated { //- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation { // // Return YES for supported orientations -// if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { +// if (!self.isPhone) { // return YES; // } // return NO; @@ -108,7 +108,7 @@ - (void)viewDidAppear:(BOOL)animated { [MBProgressHUD hideHUDForView:self.view animated:YES]; [super viewDidAppear:animated]; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { [self updateControls]; [self rearrangeViews]; } @@ -141,7 +141,7 @@ - (void)showError:(NSString *)error { self.errorLabel.hidden = !hasError; self.forgotPasswordButton.hidden = !hasError; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { self.loginOptionalLabel.hidden = hasError; } } @@ -166,7 +166,7 @@ - (IBAction)findLoginFrom1Password:(id)sender { - (BOOL)textFieldShouldReturn:(UITextField *)textField { [textField resignFirstResponder]; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { if(textField == usernameInput) { [passwordInput becomeFirstResponder]; } else if (textField == passwordInput) { @@ -244,7 +244,7 @@ - (void)registerAccount { setCookieAcceptPolicy:NSHTTPCookieAcceptPolicyAlways]; NSMutableDictionary *params = [NSMutableDictionary dictionary]; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { [params setObject:[signUpUsernameInput text] forKey:@"username"]; [params setObject:[signUpPasswordInput text] forKey:@"password"]; } else { diff --git a/clients/ios/Classes/MoveSiteViewController.m b/clients/ios/Classes/MoveSiteViewController.m index 34190e24ab..d4eed46a66 100644 --- a/clients/ios/Classes/MoveSiteViewController.m +++ b/clients/ios/Classes/MoveSiteViewController.m @@ -61,7 +61,7 @@ - (void)viewDidLoad { //- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation { // // Return YES for supported orientations -// if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { +// if (!self.isPhone) { // return YES; // } else if 
(UIInterfaceOrientationIsPortrait(interfaceOrientation)) {
//        return YES;
diff --git a/clients/ios/Classes/NewsBlurAppDelegate.h b/clients/ios/Classes/NewsBlurAppDelegate.h
index f526de3db1..0ca8c817b7 100644
--- a/clients/ios/Classes/NewsBlurAppDelegate.h
+++ b/clients/ios/Classes/NewsBlurAppDelegate.h
@@ -287,7 +287,6 @@ SFSafariViewControllerDelegate> {
 @property (nonatomic, readwrite) BOOL hasQueuedReadStories;
 @property (nonatomic, readwrite) BOOL hasQueuedSavedStories;
 @property (nonatomic, readonly) BOOL showingSafariViewController;
-@property (nonatomic, readonly) BOOL isCompactWidth;
 //@property (nonatomic) CGFloat compactWidth;
 
 @property (nonatomic, strong) BGAppRefreshTask *backgroundAppRefreshTask;
@@ -395,7 +394,6 @@ SFSafariViewControllerDelegate> {
 - (BOOL)isSavedStoriesIntelligenceMode;
 - (NSArray *)allFeedIds;
 - (NSArray *)feedIdsForFolderTitle:(NSString *)folderTitle;
-- (BOOL)isPortrait;
 - (void)confirmLogout;
 - (void)showConnectToService:(NSString *)serviceName;
 - (void)showAlert:(UIAlertController *)alert withViewController:(UIViewController *)vc;
diff --git a/clients/ios/Classes/NewsBlurAppDelegate.m b/clients/ios/Classes/NewsBlurAppDelegate.m
index 56dcedca75..4e5f70f579 100644
--- a/clients/ios/Classes/NewsBlurAppDelegate.m
+++ b/clients/ios/Classes/NewsBlurAppDelegate.m
@@ -207,7 +207,7 @@ - (BOOL)application:(UIApplication *)application willFinishLaunchingWithOptions:
 //    self.navigationController.viewControllers = [NSArray arrayWithObject:self.feedsViewController];
     self.storiesCollection = [StoriesCollection new];
 
-//    if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) {
+//    if ([[UIDevice currentDevice] userInterfaceIdiom] != UIUserInterfaceIdiomPhone) {
 //        self.window.rootViewController = self.masterContainerViewController;
 //    } else {
 //        self.window.rootViewController = self.navigationController;
 //    }
@@ -242,7 +242,7 @@ - (BOOL)application:(UIApplication *)application willFinishLaunchingWithOptions:
                                              (unsigned long)NULL), ^(void) {
         [self setupReachability];
         self.cacheImagesOperationQueue = [NSOperationQueue new];
-        if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) {
+        if (!self.isPhone) {
             self.cacheImagesOperationQueue.maxConcurrentOperationCount = 2;
         } else {
             self.cacheImagesOperationQueue.maxConcurrentOperationCount = 1;
         }
@@ -423,6 +423,10 @@ - (BOOL)handleShortcutItem:(UIApplicationShortcutItem *)shortcutItem {
     return handled;
 }
 
+- (void)buildMenuWithBuilder:(id<UIMenuBuilder>)builder {
+    
+}
+
 - (void)delayedAddSite {
     [self.feedsViewController tapAddSite:self];
 }
@@ -476,7 +480,7 @@ - (void)registerDefaultsFromSettingsBundle {
         return;
     }
     
-    NSString *name = [[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad ? @"Root~ipad.plist" : @"Root.plist";
+    NSString *name = !self.isPhone ? 
@"Root~ipad.plist" : @"Root.plist"; NSDictionary *settings = [NSDictionary dictionaryWithContentsOfFile:[settingsBundle stringByAppendingPathComponent:name]]; NSArray *preferences = [settings objectForKey:@"PreferenceSpecifiers"]; @@ -767,7 +771,7 @@ - (void)showUserProfileModal:(id)sender { newUserProfile.navigationItem.title = self.activeUserProfileName; newUserProfile.navigationItem.backBarButtonItem.title = self.activeUserProfileName; [newUserProfile getUserProfile]; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { [self showPopoverWithViewController:self.userProfileNavigationController contentSize:CGSizeMake(320, 454) sender:sender]; } else { [self.feedsNavigationController presentViewController:navController animated:YES completion:nil]; @@ -799,7 +803,7 @@ - (void)pushUserProfile { } - (void)hideUserProfileModal { - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { [self hidePopover]; } else { [self.feedsNavigationController dismissViewControllerAnimated:YES completion:nil]; @@ -896,6 +900,11 @@ - (void)addSplitControlToMenuController:(MenuViewController *)menuViewController } - (void)showPreferences { + if (self.isMac) { +// [[UIApplication sharedApplication] sendAction:@selector(orderFrontPreferencesPanel:) to:nil from:nil forEvent:nil]; + return; + } + if (!preferencesViewController) { preferencesViewController = [[IASKAppSettingsViewController alloc] init]; [[ThemeManager themeManager] addThemeGestureRecognizerToView:self.preferencesViewController.view]; @@ -917,7 +926,7 @@ - (void)showPreferences { self.modalNavigationController = navController; self.modalNavigationController.navigationBar.translucent = NO; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { self.modalNavigationController.modalPresentationStyle = UIModalPresentationFormSheet; } @@ -1092,7 +1101,7 @@ - (void)showSendTo:(UIViewController *)vc sender:(id)sender } }]; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { BOOL fromPopover = [self hidePopoverAnimated:NO]; [self.splitViewController presentViewController:activityViewController animated:!fromPopover completion:nil]; activityViewController.modalPresentationStyle = UIModalPresentationPopover; @@ -1132,7 +1141,7 @@ - (void)showShareView:(NSString *)type setReplyId:(NSString *)replyId { [self.shareViewController setCommentType:type]; -// if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { +// if (!self.isPhone) { // [self.masterContainerViewController transitionToShareView]; // [self.shareViewController setSiteInfo:type setUserId:userId setUsername:username setReplyId:replyId]; // } else { @@ -1154,7 +1163,7 @@ - (void)hideShareView:(BOOL)resetComment { self.shareViewController.currentType = nil; } -// if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { +// if (!self.isPhone) { // [self.masterContainerViewController transitionFromShareView]; // [self.storyPagesViewController becomeFirstResponder]; // } else @@ -1301,7 +1310,7 @@ - (void)openTrainSiteWithFeedLoaded:(BOOL)feedLoaded from:(id)sender { trainerViewController.storyTrainer = NO; trainerViewController.feedLoaded = feedLoaded; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { // trainerViewController.modalPresentationStyle=UIModalPresentationFormSheet; // [navController 
presentViewController:trainerViewController animated:YES completion:nil]; [self showPopoverWithViewController:self.trainerViewController contentSize:CGSizeMake(500, 630) sender:sender]; @@ -1320,7 +1329,7 @@ - (void)openTrainStory:(id)sender { trainerViewController.feedTrainer = NO; trainerViewController.storyTrainer = YES; trainerViewController.feedLoaded = YES; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { [self showPopoverWithViewController:self.trainerViewController contentSize:CGSizeMake(500, 630) sender:sender]; } else { if (self.trainNavigationController == nil) { @@ -1343,7 +1352,7 @@ - (void)openNotificationsWithFeed:(NSString *)feedId { - (void)openNotificationsWithFeed:(NSString *)feedId sender:(id)sender { UINavigationController *navController = self.feedsNavigationController; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { [self showPopoverWithViewController:self.notificationsViewController contentSize:CGSizeMake(420, 382) sender:sender]; } else { if (self.notificationsNavigationController == nil) { @@ -1453,13 +1462,17 @@ - (void)clearNetworkManager { networkManager.responseSerializer = [AFJSONResponseSerializer serializer]; [networkManager.requestSerializer setCachePolicy:NSURLRequestReloadIgnoringLocalCacheData]; - NSString *currentiPhoneVersion = [[[NSBundle mainBundle] infoDictionary] + NSString *currentVersion = [[[NSBundle mainBundle] infoDictionary] objectForKey:@"CFBundleVersion"]; NSString *UA; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { - UA = [NSString stringWithFormat:@"NewsBlur iPad App v%@", currentiPhoneVersion]; + if (self.isMac) { + UA = [NSString stringWithFormat:@"NewsBlur Mac App v%@", currentVersion]; + } else if (self.isVision) { + UA = [NSString stringWithFormat:@"NewsBlur Vision App v%@", currentVersion]; + } else if (self.isPhone) { + UA = [NSString stringWithFormat:@"NewsBlur iPhone App v%@", currentVersion]; } else { - UA = [NSString stringWithFormat:@"NewsBlur iPhone App v%@", currentiPhoneVersion]; + UA = [NSString stringWithFormat:@"NewsBlur iPad App v%@", currentVersion]; } [networkManager.requestSerializer setValue:UA forHTTPHeaderField:@"User-Agent"]; } @@ -1699,7 +1712,7 @@ - (void)loadFeed:(NSString *)feedId [self reloadFeedsView:NO]; // dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 1 * NSEC_PER_SEC), dispatch_get_main_queue(), ^{ -// if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { +// if (!self.isPhone) { // [self loadFeedDetailView]; // } else if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone) { // // [self.feedsNavigationController popToRootViewControllerAnimated:NO]; @@ -1747,7 +1760,7 @@ - (void)loadTryFeedDetailView:(NSString *)feedId storiesCollection.activeFeed = feed; storiesCollection.activeFolder = nil; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { [self loadFeedDetailView]; } else if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone) { // [self.feedsNavigationController popToRootViewControllerAnimated:NO]; @@ -1768,7 +1781,7 @@ - (void)loadTryFeedDetailView:(NSString *)feedId - (void)backgroundLoadNotificationStory { if (self.inFindingStoryMode) { if ([storiesCollection.activeFolder isEqualToString:@"widget_stories"]) { - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { 
[self.feedsViewController selectWidgetStories]; } else { [self loadRiverFeedDetailView:self.feedDetailViewController withFolder:self.widgetFolder]; @@ -1781,7 +1794,7 @@ - (void)backgroundLoadNotificationStory { } } else if (self.tryFeedFeedId && !self.isTryFeedView) { [self loadFeed:self.tryFeedFeedId withStory:self.tryFeedStoryId animated:NO]; - } else if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad && !self.isCompactWidth && self.storiesCollection == nil) { + } else if (!self.isPhone && !self.isCompactWidth && self.storiesCollection == nil) { [self loadRiverFeedDetailView:self.feedDetailViewController withFolder:storiesCollection.activeFolder]; } else if (self.pendingFolder != nil) { [self loadRiverFeedDetailView:self.feedDetailViewController withFolder:self.pendingFolder]; @@ -1821,7 +1834,7 @@ - (void)loadStarredDetailViewWithStory:(NSString *)contentId [self loadRiverFeedDetailView:feedDetailViewController withFolder:@"saved_stories"]; if (showHUD) { - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { [self.storyPagesViewController showShareHUD:@"Finding story..."]; } else { MBProgressHUD *HUD = [MBProgressHUD showHUDAddedTo:self.feedDetailViewController.view animated:YES]; @@ -1888,20 +1901,6 @@ - (NSArray *)feedIdsForFolderTitle:(NSString *)folderTitle { } } -- (BOOL)isPortrait { - UIInterfaceOrientation orientation = self.window.windowScene.interfaceOrientation; - if (orientation == UIInterfaceOrientationPortrait || orientation == UIInterfaceOrientationPortraitUpsideDown) { - return YES; - } else { - return NO; - } -} - -- (BOOL)isCompactWidth { - return self.window.windowScene.traitCollection.horizontalSizeClass == UIUserInterfaceSizeClassCompact; - //return self.compactWidth > 0.0; -} - - (void)confirmLogout { UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"Positive?" 
message:nil preferredStyle:UIAlertControllerStyleAlert]; [alertController addAction:[UIAlertAction actionWithTitle: @"Logout" style:UIAlertActionStyleDefault handler:^(UIAlertAction * action) { @@ -2112,7 +2111,7 @@ - (void)openDashboardRiverForStory:(NSString *)contentId [self loadRiverFeedDetailView:feedDetailViewController withFolder:@"river_dashboard"]; if (showHUD) { - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { [self.storyPagesViewController showShareHUD:@"Finding story..."]; } else { MBProgressHUD *HUD = [MBProgressHUD showHUDAddedTo:self.feedDetailViewController.view animated:YES]; @@ -2225,7 +2224,7 @@ - (void)loadStoryDetailView { [self.detailViewController checkLayout]; } - BOOL animated = ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad && + BOOL animated = (!self.isPhone && !self.tryFeedCategory); [self.storyPagesViewController view]; [self.storyPagesViewController.view setNeedsLayout]; @@ -2353,7 +2352,7 @@ - (void)showInAppBrowser:(NSURL *)url withCustomTitle:(NSString *)customTitle fr self.activeOriginalStoryURL = url; originalStoryViewController.customPageTitle = customTitle; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { if ([sender isKindOfClass:[UIBarButtonItem class]]) { [originalStoryViewController view]; // Force viewDidLoad [originalStoryViewController loadInitialStory]; @@ -2408,7 +2407,7 @@ - (void)safariViewControllerDidFinish:(SFSafariViewController *)controller { } - (void)deferredSafariCleanup { -// if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { +// if (!self.isPhone) { // self.navigationController.view.frame = CGRectMake(self.navigationController.view.frame.origin.x, self.navigationController.view.frame.origin.y, self.isPortrait ? 
270.0 : 370.0, self.navigationController.view.frame.size.height); // } @@ -2442,7 +2441,7 @@ - (UINavigationController *)fontSettingsNavigationController { } - (void)closeOriginalStory { - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { // [self.masterContainerViewController transitionFromOriginalView]; } else { if ([[feedsNavigationController viewControllers] containsObject:originalStoryViewController]) { diff --git a/clients/ios/Classes/NotificationsViewController.m b/clients/ios/Classes/NotificationsViewController.m index 709581ce21..409f1be211 100644 --- a/clients/ios/Classes/NotificationsViewController.m +++ b/clients/ios/Classes/NotificationsViewController.m @@ -85,7 +85,7 @@ - (UIView *)tableView:(UITableView *)tableView viewForHeaderInSection:(NSInteger)section { int headerLabelHeight, folderImageViewY; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { headerLabelHeight = 36; folderImageViewY = 8; } else { diff --git a/clients/ios/Classes/OriginalStoryViewController.m b/clients/ios/Classes/OriginalStoryViewController.m index 8e31db3da5..6a3879ea2c 100644 --- a/clients/ios/Classes/OriginalStoryViewController.m +++ b/clients/ios/Classes/OriginalStoryViewController.m @@ -32,7 +32,7 @@ - (void)viewDidLoad { self.view.layer.shadowOpacity = 0.5; self.view.layer.shadowPath = [UIBezierPath bezierPathWithRect:self.view.bounds].CGPath; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { closeButton = [UIBarButtonItem barItemWithImage:[UIImage imageNamed:@"ios7_back_button"] target:self action:@selector(closeOriginalView)]; @@ -70,7 +70,7 @@ - (void)viewDidLoad { // UIGestureRecognizer *themeGesture = [[ThemeManager themeManager] addThemeGestureRecognizerToView:self.webView]; // [self.webView.scrollView.panGestureRecognizer requireGestureRecognizerToFail:themeGesture]; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { UIPanGestureRecognizer *gesture = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(handlePanGesture:)]; gesture.delegate = self; @@ -215,7 +215,7 @@ - (void)handlePanGesture:(UIPanGestureRecognizer *)recognizer { center.y); self.view.center = center; [recognizer setTranslation:CGPointZero inView:self.view]; -// if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { +// if (!self.isPhone) { // [appDelegate.masterContainerViewController interactiveTransitionFromOriginalView:percentage]; // } else { // @@ -231,7 +231,7 @@ - (void)handlePanGesture:(UIPanGestureRecognizer *)recognizer { [self transitionToFeedDetail:recognizer]; } else { // NSLog(@"Original velocity: %f (at %.2f%%)", velocity, percentage*100); -// if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { +// if (!self.isPhone) { // [appDelegate.masterContainerViewController transitionToOriginalView:NO]; // } else { // @@ -241,7 +241,7 @@ - (void)handlePanGesture:(UIPanGestureRecognizer *)recognizer { } - (void)transitionToFeedDetail:(UIGestureRecognizer *)recognizer { -// if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { +// if (!self.isPhone) { // [appDelegate.masterContainerViewController transitionFromOriginalView]; // } else { // diff --git a/clients/ios/Classes/ShareViewController.m b/clients/ios/Classes/ShareViewController.m index eded2e0f66..c6027a0062 100644 --- 
a/clients/ios/Classes/ShareViewController.m
+++ b/clients/ios/Classes/ShareViewController.m
@@ -202,7 +202,7 @@ - (void)adjustCommentField:(CGSize)kbSize {
         self.storyTitle.frame = CGRectMake(20, 8, v.width - 20*2, 24);
         stOffset = self.storyTitle.frame.origin.y + self.storyTitle.frame.size.height;
         stHeight = self.storyTitle.frame.size.height;
-    } else if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) {
+    } else if (!self.isPhone) {
         k = 0;
     }
     NSLog(@"Share type: %@", self.currentType);
diff --git a/clients/ios/Classes/SmallActivityCell.m b/clients/ios/Classes/SmallActivityCell.m
index c2ffa2f69c..0e4126d675 100644
--- a/clients/ios/Classes/SmallActivityCell.m
+++ b/clients/ios/Classes/SmallActivityCell.m
@@ -9,6 +9,7 @@
 #import "SmallActivityCell.h"
 #import "UIImageView+AFNetworking.h"
 #import <QuartzCore/QuartzCore.h>
+#import "NewsBlurAppDelegate.h"
 
 @implementation SmallActivityCell
@@ -62,7 +63,7 @@ - (void)layoutSubviews {
     labelFrame.size.height = contentRect.size.height;
     self.activityLabel.frame = labelFrame;
     
-    if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) {
+    if (!((NewsBlurAppDelegate *)[[UIApplication sharedApplication] delegate]).isPhone) {
         self.activityLabel.backgroundColor = UIColorFromRGB(0xd7dadf);
     } else {
         self.activityLabel.backgroundColor = UIColorFromRGB(0xf6f6f6);
     }
diff --git a/clients/ios/Classes/SmallInteractionCell.m b/clients/ios/Classes/SmallInteractionCell.m
index 6d346fe82e..23a7d15fd1 100644
--- a/clients/ios/Classes/SmallInteractionCell.m
+++ b/clients/ios/Classes/SmallInteractionCell.m
@@ -9,6 +9,7 @@
 #import "SmallInteractionCell.h"
 #import "UIImageView+AFNetworking.h"
 #import <QuartzCore/QuartzCore.h>
+#import "NewsBlurAppDelegate.h"
 
 @implementation SmallInteractionCell
@@ -57,8 +58,8 @@ - (void)layoutSubviews {
     labelFrame.size.width = contentRect.size.width - leftMargin - avatarSize - leftMargin - rightMargin - 20;
     labelFrame.size.height = contentRect.size.height;
     self.interactionLabel.frame = labelFrame;
-    
-    if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) {
+    
+    if (!((NewsBlurAppDelegate *)[[UIApplication sharedApplication] delegate]).isPhone) {
         self.interactionLabel.backgroundColor = UIColorFromRGB(0xd7dadf);
     } else {
         self.interactionLabel.backgroundColor = UIColorFromRGB(0xf6f6f6);
     }
diff --git a/clients/ios/Classes/Story.swift b/clients/ios/Classes/Story.swift
index 83b4f8b3e8..a4908f93e8 100644
--- a/clients/ios/Classes/Story.swift
+++ b/clients/ios/Classes/Story.swift
@@ -335,14 +335,14 @@ class StorySettings {
         guard let pref = UserDefaults.standard.string(forKey: "grid_columns"), let columns = Int(pref) else {
             if NewsBlurAppDelegate.shared.isCompactWidth {
                 return 1
-            } else if NewsBlurAppDelegate.shared.isPortrait() {
+            } else if NewsBlurAppDelegate.shared.isPortrait {
                 return 2
             } else {
                 return 4
             }
         }
         
-        if NewsBlurAppDelegate.shared.isPortrait(), columns > 3 {
+        if NewsBlurAppDelegate.shared.isPortrait, columns > 3 {
             return 3
         }
diff --git a/clients/ios/Classes/StoryDetailObjCViewController.m b/clients/ios/Classes/StoryDetailObjCViewController.m
index 865369ffff..8fd36154b9 100644
--- a/clients/ios/Classes/StoryDetailObjCViewController.m
+++ b/clients/ios/Classes/StoryDetailObjCViewController.m
@@ -24,8 +24,8 @@
 #import "JNWThrottledBlock.h"
 #import "NewsBlur-Swift.h"
 
-#define iPadPro12 ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad && ([UIScreen mainScreen].bounds.size.height == 1366 || [UIScreen mainScreen].bounds.size.width == 1366))
-#define iPadPro10 ([[UIDevice currentDevice] 
userInterfaceIdiom] == UIUserInterfaceIdiomPad && ([UIScreen mainScreen].bounds.size.height == 1112 || [UIScreen mainScreen].bounds.size.width == 1112)) +#define iPadPro12 (!self.isPhone && ([UIScreen mainScreen].bounds.size.height == 1366 || [UIScreen mainScreen].bounds.size.width == 1366)) +#define iPadPro10 (!self.isPhone && ([UIScreen mainScreen].bounds.size.height == 1112 || [UIScreen mainScreen].bounds.size.width == 1112)) @interface StoryDetailObjCViewController () @@ -99,7 +99,7 @@ - (void)viewDidLoad { [self.webView.scrollView setAutoresizingMask:(UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight)]; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { self.webView.scrollView.contentInsetAdjustmentBehavior = UIScrollViewContentInsetAdjustmentNever; } @@ -401,9 +401,13 @@ - (void)loadHTMLString:(NSString *)html { static NSURL *baseURL; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ +#if TARGET_OS_MACCATALYST + baseURL = [NSBundle mainBundle].resourceURL; +#else baseURL = [NSBundle mainBundle].bundleURL; +#endif }); - + [self.webView loadHTMLString:html baseURL:baseURL]; } @@ -480,7 +484,7 @@ - (void)drawStory:(BOOL)force withOrientation:(UIInterfaceOrientation)orientatio #if TARGET_OS_MACCATALYST // CATALYST: probably will want to add custom CSS for Macs. - contentWidthClass = @"NB-ipad-wide NB-ipad-pro-12-wide NB-width-768"; + contentWidthClass = @"NB-ipad-wide NB-ipad-pro-12-wide"; #else if (UIInterfaceOrientationIsLandscape(orientation) && !self.isPhoneOrCompact) { if (iPadPro12) { @@ -503,10 +507,10 @@ - (void)drawStory:(BOOL)force withOrientation:(UIInterfaceOrientation)orientatio } else { contentWidthClass = @"NB-iphone"; } +#endif contentWidthClass = [NSString stringWithFormat:@"%@ NB-width-%d", contentWidthClass, (int)floorf(CGRectGetWidth(self.view.frame))]; -#endif if (appDelegate.feedsViewController.isOffline) { NSString *storyHash = [self.activeStory objectForKey:@"story_hash"]; @@ -2399,7 +2403,7 @@ - (void)changeWebViewWidth { #if TARGET_OS_MACCATALYST // CATALYST: probably will want to add custom CSS for Macs. 
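// As in drawStory:withOrientation: above, the hard-coded NB-width-768 class is
// dropped from the Catalyst branch, and the dynamic NB-width-%d class moves
// below the #endif so that every platform, including a freely resizable Mac
// window, reports its real width to the story CSS. A minimal sketch of the
// resulting flow, using only the pieces shown in this hunk:
//
//     #if TARGET_OS_MACCATALYST
//     contentWidthClass = @"NB-ipad-wide NB-ipad-pro-12-wide";
//     #else
//     // ... device- and orientation-specific classes ...
//     #endif
//     // Shared by all platforms, Macs included:
//     contentWidthClass = [NSString stringWithFormat:@"%@ NB-width-%d",
//                          contentWidthClass,
//                          (int)floorf(CGRectGetWidth(webView.scrollView.bounds))];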
- contentWidthClass = @"NB-ipad-wide NB-ipad-pro-12-wide NB-width-768"; + contentWidthClass = @"NB-ipad-wide NB-ipad-pro-12-wide"; #else UIInterfaceOrientation orientation = self.view.window.windowScene.interfaceOrientation; @@ -2424,10 +2428,10 @@ - (void)changeWebViewWidth { } else { contentWidthClass = @"NB-iphone"; } +#endif contentWidthClass = [NSString stringWithFormat:@"%@ NB-width-%d", contentWidthClass, (int)floorf(CGRectGetWidth(webView.scrollView.bounds))]; -#endif NSString *alternateViewClass = @""; if (!self.isPhoneOrCompact) { diff --git a/clients/ios/Classes/StoryPagesObjCViewController.m b/clients/ios/Classes/StoryPagesObjCViewController.m index 505cd1e3d6..7e973d0341 100644 --- a/clients/ios/Classes/StoryPagesObjCViewController.m +++ b/clients/ios/Classes/StoryPagesObjCViewController.m @@ -108,7 +108,7 @@ - (void)viewDidLoad { [self.scrollView setAlwaysBounceHorizontal:self.isHorizontal]; [self.scrollView setAlwaysBounceVertical:!self.isHorizontal]; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { self.scrollView.contentInsetAdjustmentBehavior = UIScrollViewContentInsetAdjustmentNever; } @@ -663,7 +663,7 @@ - (void)reorientPages { [MBProgressHUD hideHUDForView:self.view animated:YES]; [self hideNotifier]; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { [currentPage realignScroll]; } } @@ -772,7 +772,7 @@ - (void)restorePage { if (pageIndex >= 0) { [self changePage:pageIndex animated:NO]; - } else if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + } else if (!self.isPhone) { // If the story can't be found, don't show anything; uncomment this to instead show the first unread story: // [self doNextUnreadStory:nil]; } else { @@ -1218,7 +1218,7 @@ - (void)updatePageWithActiveStory:(NSInteger)location updateFeedDetail:(BOOL)upd [appDelegate.storiesCollection pushReadStory:[appDelegate.activeStory objectForKey:@"story_hash"]]; - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { if (appDelegate.detailViewController.storyTitlesOnLeft) { appDelegate.detailViewController.navigationItem.rightBarButtonItems = [NSArray arrayWithObjects: originalStoryButton, diff --git a/clients/ios/Classes/TrainerViewController.m b/clients/ios/Classes/TrainerViewController.m index ce7c7b9d5c..76d7c682e2 100644 --- a/clients/ios/Classes/TrainerViewController.m +++ b/clients/ios/Classes/TrainerViewController.m @@ -99,7 +99,7 @@ - (void)viewWillAppear:(BOOL)animated { [self informError:@"Could not load trainer"]; dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 1.0 * NSEC_PER_SEC), dispatch_get_main_queue(), ^() { - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!self.isPhone) { [self.appDelegate hidePopover]; } else { [self.appDelegate.feedsNavigationController dismissViewControllerAnimated:YES completion:nil]; diff --git a/clients/ios/Classes/UnreadCountView.m b/clients/ios/Classes/UnreadCountView.m index 5d6c9c6946..abcab54181 100644 --- a/clients/ios/Classes/UnreadCountView.m +++ b/clients/ios/Classes/UnreadCountView.m @@ -54,7 +54,7 @@ - (void)drawInRect:(CGRect)r ps:(NSInteger)ps nt:(NSInteger)nt listType:(NBFeedL CGRect rr; if (listType == NBFeedListSocial) { - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!appDelegate.isPhone) { rr = CGRectMake(rect.size.width + rect.origin.x - psOffset, CGRectGetMidY(r)-COUNT_HEIGHT/2, psWidth, 
COUNT_HEIGHT); } else { rr = CGRectMake(rect.size.width + rect.origin.x - psOffset, CGRectGetMidY(r)-COUNT_HEIGHT/2, psWidth, COUNT_HEIGHT); @@ -98,7 +98,7 @@ - (void)drawInRect:(CGRect)r ps:(NSInteger)ps nt:(NSInteger)nt listType:(NBFeedL if (nt > 0 && appDelegate.selectedIntelligence <= 0) { CGRect rr; if (listType == NBFeedListSocial) { - if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad) { + if (!appDelegate.isPhone) { rr = CGRectMake(rect.size.width + rect.origin.x - psWidth - psPadding - ntOffset, CGRectGetMidY(r)-COUNT_HEIGHT/2, ntWidth, COUNT_HEIGHT); } else { rr = CGRectMake(rect.size.width + rect.origin.x - psWidth - psPadding - ntOffset, CGRectGetMidY(r)-COUNT_HEIGHT/2, ntWidth, COUNT_HEIGHT); diff --git a/clients/ios/NewsBlur.xcodeproj/project.pbxproj b/clients/ios/NewsBlur.xcodeproj/project.pbxproj index 6595bdd5b8..27ceae4c3f 100755 --- a/clients/ios/NewsBlur.xcodeproj/project.pbxproj +++ b/clients/ios/NewsBlur.xcodeproj/project.pbxproj @@ -3,7 +3,7 @@ archiveVersion = 1; classes = { }; - objectVersion = 52; + objectVersion = 54; objects = { /* Begin PBXBuildFile section */ @@ -5539,7 +5539,9 @@ PROVISIONING_PROFILE = ""; PROVISIONING_PROFILE_SPECIFIER = ""; STRIP_INSTALLED_PRODUCT = NO; + SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; SUPPORTS_MACCATALYST = YES; + SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = NO; SWIFT_OBJC_BRIDGING_HEADER = "Other Sources/BridgingHeader.h"; SWIFT_OBJC_INTERFACE_HEADER_NAME = "NewsBlur-Swift.h"; TARGETED_DEVICE_FAMILY = "1,2,6"; @@ -5586,7 +5588,9 @@ PRODUCT_NAME = "NB Alpha"; PROVISIONING_PROFILE = ""; PROVISIONING_PROFILE_SPECIFIER = ""; + SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; SUPPORTS_MACCATALYST = YES; + SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = NO; SWIFT_OBJC_BRIDGING_HEADER = "Other Sources/BridgingHeader.h"; SWIFT_OBJC_INTERFACE_HEADER_NAME = "NewsBlur-Swift.h"; TARGETED_DEVICE_FAMILY = "1,2,6"; diff --git a/clients/ios/Other Sources/AFNetworking/AFURLRequestSerialization.m b/clients/ios/Other Sources/AFNetworking/AFURLRequestSerialization.m index a4d5d9dfee..7043807cb9 100755 --- a/clients/ios/Other Sources/AFNetworking/AFURLRequestSerialization.m +++ b/clients/ios/Other Sources/AFNetworking/AFURLRequestSerialization.m @@ -171,7 +171,7 @@ - (NSMutableURLRequest *)requestByFinalizingMultipartFormData; #pragma mark - -static NSArray * AFHTTPRequestSerializerObservedKeyPaths() { +static NSArray * AFHTTPRequestSerializerObservedKeyPaths(void) { static NSArray *_AFHTTPRequestSerializerObservedKeyPaths = nil; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ @@ -591,7 +591,7 @@ - (instancetype)copyWithZone:(NSZone *)zone { #pragma mark - -static NSString * AFCreateMultipartFormBoundary() { +static NSString * AFCreateMultipartFormBoundary(void) { return [NSString stringWithFormat:@"Boundary+%08X%08X", arc4random(), arc4random()]; } diff --git a/clients/ios/Other Sources/AFNetworking/AFURLSessionManager.m b/clients/ios/Other Sources/AFNetworking/AFURLSessionManager.m index e96b8e2bbf..357659e485 100755 --- a/clients/ios/Other Sources/AFNetworking/AFURLSessionManager.m +++ b/clients/ios/Other Sources/AFNetworking/AFURLSessionManager.m @@ -28,7 +28,7 @@ #define NSFoundationVersionNumber_With_Fixed_5871104061079552_bug NSFoundationVersionNumber_iOS_8_0 #endif -static dispatch_queue_t url_session_manager_creation_queue() { +static dispatch_queue_t url_session_manager_creation_queue(void) { static dispatch_queue_t af_url_session_manager_creation_queue; static dispatch_once_t onceToken; 
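// The empty parameter lists rewritten to (void) in this file and in
// AFURLRequestSerialization.m are not cosmetic: in C, foo() declares a
// function with unspecified parameters, while only foo(void) declares one
// that takes no arguments, and recent clang releases warn that the old
// spelling is deprecated. A sketch of the fixed pattern, with hypothetical
// names:
//
//     static dispatch_queue_t example_queue(void) {   // was: example_queue()
//         static dispatch_queue_t queue;
//         static dispatch_once_t onceToken;
//         dispatch_once(&onceToken, ^{
//             queue = dispatch_queue_create("com.example.queue", DISPATCH_QUEUE_SERIAL);
//         });
//         return queue;
//     }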
dispatch_once(&onceToken, ^{ @@ -49,7 +49,7 @@ static void url_session_manager_create_task_safely(dispatch_block_t block) { } } -static dispatch_queue_t url_session_manager_processing_queue() { +static dispatch_queue_t url_session_manager_processing_queue(void) { static dispatch_queue_t af_url_session_manager_processing_queue; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ @@ -59,7 +59,7 @@ static dispatch_queue_t url_session_manager_processing_queue() { return af_url_session_manager_processing_queue; } -static dispatch_group_t url_session_manager_completion_group() { +static dispatch_group_t url_session_manager_completion_group(void) { static dispatch_group_t af_url_session_manager_completion_group; static dispatch_once_t onceToken; dispatch_once(&onceToken, ^{ diff --git a/clients/ios/Other Sources/AFNetworking/UIRefreshControl+AFNetworking.h b/clients/ios/Other Sources/AFNetworking/UIRefreshControl+AFNetworking.h index 215eafcf74..e9101efaa6 100755 --- a/clients/ios/Other Sources/AFNetworking/UIRefreshControl+AFNetworking.h +++ b/clients/ios/Other Sources/AFNetworking/UIRefreshControl+AFNetworking.h @@ -24,7 +24,7 @@ #import -#if TARGET_OS_IOS +#if TARGET_OS_IOS && !TARGET_OS_MACCATALYST #import diff --git a/clients/ios/Other Sources/AFNetworking/UIRefreshControl+AFNetworking.m b/clients/ios/Other Sources/AFNetworking/UIRefreshControl+AFNetworking.m index cd46916a10..4c5efaacfe 100755 --- a/clients/ios/Other Sources/AFNetworking/UIRefreshControl+AFNetworking.m +++ b/clients/ios/Other Sources/AFNetworking/UIRefreshControl+AFNetworking.m @@ -23,7 +23,7 @@ #import "UIRefreshControl+AFNetworking.h" #import -#if TARGET_OS_IOS +#if TARGET_OS_IOS && !TARGET_OS_MACCATALYST #import "AFURLSessionManager.h" diff --git a/clients/ios/Other Sources/InAppSettingsKit/Controllers/IASKAppSettingsViewController.m b/clients/ios/Other Sources/InAppSettingsKit/Controllers/IASKAppSettingsViewController.m index 0e49853a88..9ffc120fab 100755 --- a/clients/ios/Other Sources/InAppSettingsKit/Controllers/IASKAppSettingsViewController.m +++ b/clients/ios/Other Sources/InAppSettingsKit/Controllers/IASKAppSettingsViewController.m @@ -123,7 +123,7 @@ - (void)createSelections { - (BOOL)isPad { BOOL isPad = NO; #if (__IPHONE_OS_VERSION_MAX_ALLOWED >= 30200) - isPad = [[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad; + isPad = [[UIDevice currentDevice] userInterfaceIdiom] != UIUserInterfaceIdiomPhone; #endif return isPad; } diff --git a/clients/ios/Other Sources/InAppSettingsKit/Models/IASKSettingsReader.m b/clients/ios/Other Sources/InAppSettingsKit/Models/IASKSettingsReader.m index 789bd9348e..2a511f7f93 100755 --- a/clients/ios/Other Sources/InAppSettingsKit/Models/IASKSettingsReader.m +++ b/clients/ios/Other Sources/InAppSettingsKit/Models/IASKSettingsReader.m @@ -279,7 +279,7 @@ - (NSString *)platformSuffixForInterfaceIdiom:(UIUserInterfaceIdiom) interfaceId switch (interfaceIdiom) { case UIUserInterfaceIdiomPad: return @"~ipad"; case UIUserInterfaceIdiomPhone: return @"~iphone"; - default: return @"~iphone"; + default: return @"~ipad"; } } diff --git a/clients/ios/Other Sources/OnePasswordExtension/OnePasswordExtension.m b/clients/ios/Other Sources/OnePasswordExtension/OnePasswordExtension.m index 3988ffb41e..c5840dae60 100644 --- a/clients/ios/Other Sources/OnePasswordExtension/OnePasswordExtension.m +++ b/clients/ios/Other Sources/OnePasswordExtension/OnePasswordExtension.m @@ -466,7 +466,7 @@ - (void)processExtensionItem:(nullable NSExtensionItem *)extensionItem completio 
} - (UIActivityViewController *)activityViewControllerForItem:(nonnull NSDictionary *)item viewController:(nonnull UIViewController*)viewController sender:(nullable id)sender typeIdentifier:(nonnull NSString *)typeIdentifier { - NSAssert(NO == ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPad && sender == nil), @"sender must not be nil on iPad."); + NSAssert(NO == ([[UIDevice currentDevice] userInterfaceIdiom] != UIUserInterfaceIdiomPhone && sender == nil), @"sender must not be nil on iPad."); NSItemProvider *itemProvider = [[NSItemProvider alloc] initWithItem:item typeIdentifier:typeIdentifier]; diff --git a/clients/ios/Resources/MainInterface.storyboard b/clients/ios/Resources/MainInterface.storyboard index 6361ca2c57..5c97a96af6 100644 --- a/clients/ios/Resources/MainInterface.storyboard +++ b/clients/ios/Resources/MainInterface.storyboard [nine storyboard hunks omitted: the XML element markup did not survive extraction] From 69b36e61f8519e987be5e3d5c7de5bafa94041cb Mon Sep 17 00:00:00 2001 From: David Sinclair Date: Mon, 13 Nov 2023 20:01:33 -0600 Subject: [PATCH 02/69] #1247 (Mac Catalyst edition) - Added widget on Mac. - Updated to support latest layout APIs. --- .../Classes/StoryPagesViewController.swift | 2 +- .../ios/NewsBlur.xcodeproj/project.pbxproj | 239 ++++++++++++++++-- .../xcschemes/Alpha Widget Extension.xcscheme | 101 ++++++++ .../App.entitlements} | 0 .../Info.plist} | 0 .../Widget Extension/WidgetExtension.swift | 26 +- 6 files changed, 343 insertions(+), 25 deletions(-) create mode 100644 clients/ios/NewsBlur.xcodeproj/xcshareddata/xcschemes/Alpha Widget Extension.xcscheme rename clients/ios/{NewsBlur/NewsBlur.entitlements => Resources/App.entitlements} (100%) rename clients/ios/{NewsBlur-iPhone-Info.plist => Resources/Info.plist} (100%) diff --git a/clients/ios/Classes/StoryPagesViewController.swift b/clients/ios/Classes/StoryPagesViewController.swift index c56882b24f..9842e6dcb5 100644 --- a/clients/ios/Classes/StoryPagesViewController.swift +++ b/clients/ios/Classes/StoryPagesViewController.swift @@ -20,6 +20,6 @@ class StoryPagesViewController: StoryPagesObjCViewController { /// Reload the widget timeline.
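/// With this commit the same widget source is compiled into both the main and
/// alpha widget targets, so a hard-coded kind string may no longer match every
/// build; switching to `reloadAllTimelines()` refreshes whatever widgets the
/// running app actually provides. The before and after, "Latest" being the
/// kind this diff removes:
///
///     WidgetCenter.shared.reloadTimelines(ofKind: "Latest")  // old: one kind
///     WidgetCenter.shared.reloadAllTimelines()               // new: all kinds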
@objc func reloadWidget() { - WidgetCenter.shared.reloadTimelines(ofKind: "Latest") + WidgetCenter.shared.reloadAllTimelines() } } diff --git a/clients/ios/NewsBlur.xcodeproj/project.pbxproj b/clients/ios/NewsBlur.xcodeproj/project.pbxproj index 27ceae4c3f..7048d1f136 100755 --- a/clients/ios/NewsBlur.xcodeproj/project.pbxproj +++ b/clients/ios/NewsBlur.xcodeproj/project.pbxproj @@ -716,6 +716,20 @@ 176129631C630AEB00702FE4 /* mute_feed_on@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 1761295F1C630AEB00702FE4 /* mute_feed_on@2x.png */; }; 1763E2A123B1BCC900BA080C /* WidgetFeed.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1763E2A023B1BCC900BA080C /* WidgetFeed.swift */; }; 1763E2A323B1CEB600BA080C /* WidgetBarView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1763E2A223B1CEB600BA080C /* WidgetBarView.swift */; }; + 17654E332B02C08700F61B2B /* WidgetLoader.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1723388A26BE43EB00610784 /* WidgetLoader.swift */; }; + 17654E342B02C08700F61B2B /* WidgetCache.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1723389026BF7CFE00610784 /* WidgetCache.swift */; }; + 17654E352B02C08700F61B2B /* WidgetExtension.swift in Sources */ = {isa = PBXBuildFile; fileRef = 173CB31326BCE94700BA872A /* WidgetExtension.swift */; }; + 17654E362B02C08700F61B2B /* WidgetStory.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1723388C26BE440400610784 /* WidgetStory.swift */; }; + 17654E372B02C08700F61B2B /* WidgetBarView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1723389326C3775A00610784 /* WidgetBarView.swift */; }; + 17654E382B02C08700F61B2B /* WidgetDebugTimer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 17997C5727A8FDD100483E69 /* WidgetDebugTimer.swift */; }; + 17654E392B02C08700F61B2B /* WidgetFeed.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1723388D26BE440400610784 /* WidgetFeed.swift */; }; + 17654E3A2B02C08700F61B2B /* WidgetStoryView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1791C21426C4C7BC00D815AA /* WidgetStoryView.swift */; }; + 17654E3C2B02C08700F61B2B /* SwiftUI.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 173CB31026BCE94700BA872A /* SwiftUI.framework */; }; + 17654E3D2B02C08700F61B2B /* WidgetKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 173CB30E26BCE94700BA872A /* WidgetKit.framework */; }; + 17654E3F2B02C08700F61B2B /* WhitneySSm-Medium-Bas.otf in Resources */ = {isa = PBXBuildFile; fileRef = FF3A3E051BFBBAC600ADC01A /* WhitneySSm-Medium-Bas.otf */; }; + 17654E402B02C08700F61B2B /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 173CB31526BCE94A00BA872A /* Assets.xcassets */; }; + 17654E412B02C08700F61B2B /* WhitneySSm-Book-Bas.otf in Resources */ = {isa = PBXBuildFile; fileRef = FF3A3E011BFBBAC600ADC01A /* WhitneySSm-Book-Bas.otf */; }; + 17654E472B02C0A700F61B2B /* NewsBlur Alpha Widget.appex in Embed Foundation Extensions */ = {isa = PBXBuildFile; fileRef = 17654E452B02C08700F61B2B /* NewsBlur Alpha Widget.appex */; settings = {ATTRIBUTES = (RemoveHeadersOnCopy, ); }; }; 176A5C7A24F8BD1B009E8DF9 /* DetailViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 176A5C7924F8BD1B009E8DF9 /* DetailViewController.swift */; }; 17731A9D23DFAD3D00759A7D /* ImportExportPreferences.swift in Sources */ = {isa = PBXBuildFile; fileRef = 17731A9C23DFAD3D00759A7D /* ImportExportPreferences.swift */; }; 177551D5238E228A00E27818 /* NotificationCenter.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 177551D4238E228A00E27818 /* 
NotificationCenter.framework */; platformFilter = ios; }; @@ -1342,6 +1356,13 @@ remoteGlobalIDString = 1749390F1C251BFE003D98AA; remoteInfo = "Share Extension"; }; + 17654E482B02C0A800F61B2B /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = 29B97313FDCFA39411CA2CEA /* Project object */; + proxyType = 1; + remoteGlobalIDString = 17654E312B02C08700F61B2B; + remoteInfo = "NewsBlur Alpha Widget"; + }; 177551DD238E228A00E27818 /* PBXContainerItemProxy */ = { isa = PBXContainerItemProxy; containerPortal = 29B97313FDCFA39411CA2CEA /* Project object */; @@ -1359,6 +1380,17 @@ /* End PBXContainerItemProxy section */ /* Begin PBXCopyFilesBuildPhase section */ + 17654E4A2B02C0A800F61B2B /* Embed Foundation Extensions */ = { + isa = PBXCopyFilesBuildPhase; + buildActionMask = 2147483647; + dstPath = ""; + dstSubfolderSpec = 13; + files = ( + 17654E472B02C0A700F61B2B /* NewsBlur Alpha Widget.appex in Embed Foundation Extensions */, + ); + name = "Embed Foundation Extensions"; + runOnlyForDeploymentPostprocessing = 0; + }; FF8A94A31DE3BB77000A4C31 /* Embed Foundation Extensions */ = { isa = PBXCopyFilesBuildPhase; buildActionMask = 2147483647; @@ -1473,6 +1505,7 @@ 1761295F1C630AEB00702FE4 /* mute_feed_on@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "mute_feed_on@2x.png"; sourceTree = ""; }; 1763E2A023B1BCC900BA080C /* WidgetFeed.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WidgetFeed.swift; sourceTree = ""; }; 1763E2A223B1CEB600BA080C /* WidgetBarView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WidgetBarView.swift; sourceTree = ""; }; + 17654E452B02C08700F61B2B /* NewsBlur Alpha Widget.appex */ = {isa = PBXFileReference; explicitFileType = "wrapper.app-extension"; includeInIndex = 0; path = "NewsBlur Alpha Widget.appex"; sourceTree = BUILT_PRODUCTS_DIR; }; 176A5C7924F8BD1B009E8DF9 /* DetailViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = DetailViewController.swift; sourceTree = ""; }; 17731A9C23DFAD3D00759A7D /* ImportExportPreferences.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ImportExportPreferences.swift; sourceTree = ""; }; 177551D3238E228A00E27818 /* Old NewsBlur Latest.appex */ = {isa = PBXFileReference; explicitFileType = "wrapper.app-extension"; includeInIndex = 0; path = "Old NewsBlur Latest.appex"; sourceTree = BUILT_PRODUCTS_DIR; }; @@ -1766,7 +1799,7 @@ 784B50EA127E3F68008F90EA /* LoginViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = LoginViewController.h; sourceTree = ""; }; 784B50EB127E3F68008F90EA /* LoginViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = LoginViewController.m; sourceTree = ""; }; 788EF355127E5BC80088EDC5 /* QuartzCore.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = QuartzCore.framework; path = System/Library/Frameworks/QuartzCore.framework; sourceTree = SDKROOT; }; - 8D1107310486CEB800E47090 /* NewsBlur-iPhone-Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = "NewsBlur-iPhone-Info.plist"; plistStructureDefinitionIdentifier = "com.apple.xcode.plist.structure-definition.iphone.info-plist"; sourceTree = ""; }; + 8D1107310486CEB800E47090 /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = 
Info.plist; plistStructureDefinitionIdentifier = "com.apple.xcode.plist.structure-definition.iphone.info-plist"; sourceTree = ""; }; E160F0551C9DAC2C00CB96DF /* UIViewController+HidePopover.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = "UIViewController+HidePopover.h"; path = "Other Sources/UIViewController+HidePopover.h"; sourceTree = ""; }; E160F0561C9DAC2C00CB96DF /* UIViewController+HidePopover.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = "UIViewController+HidePopover.m"; path = "Other Sources/UIViewController+HidePopover.m"; sourceTree = ""; }; E1C44B09200147ED002128AD /* StoryTitleAttributedString.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = StoryTitleAttributedString.h; sourceTree = ""; }; @@ -2071,7 +2104,7 @@ FF8A949A1DE3BB77000A4C31 /* NotificationService.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = NotificationService.m; sourceTree = ""; }; FF8A949C1DE3BB77000A4C31 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; FF8AFE561CAC73C9005D9B40 /* unread_blue@3x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "unread_blue@3x.png"; sourceTree = ""; }; - FF8C49921BBC9D140010D894 /* NewsBlur.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.xml; name = NewsBlur.entitlements; path = NewsBlur/NewsBlur.entitlements; sourceTree = ""; }; + FF8C49921BBC9D140010D894 /* App.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = App.entitlements; sourceTree = ""; }; FF8D1EA51BAA304E00725D8A /* Reachability.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = Reachability.h; sourceTree = ""; }; FF8D1EA61BAA304E00725D8A /* Reachability.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = Reachability.m; sourceTree = ""; }; FF8D1EBD1BAA311000725D8A /* SBJson4.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = SBJson4.h; sourceTree = ""; }; @@ -2313,6 +2346,15 @@ ); runOnlyForDeploymentPostprocessing = 0; }; + 17654E3B2B02C08700F61B2B /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + 17654E3C2B02C08700F61B2B /* SwiftUI.framework in Frameworks */, + 17654E3D2B02C08700F61B2B /* WidgetKit.framework in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; 177551D0238E228A00E27818 /* Frameworks */ = { isa = PBXFrameworksBuildPhase; buildActionMask = 2147483647; @@ -2513,6 +2555,7 @@ 177551D3238E228A00E27818 /* Old NewsBlur Latest.appex */, 173CB30D26BCE94700BA872A /* NewsBlur Widget.appex */, 175792E42930605500490924 /* NB Alpha.app */, + 17654E452B02C08700F61B2B /* NewsBlur Alpha Widget.appex */, ); name = Products; sourceTree = ""; @@ -2531,8 +2574,6 @@ 173CB31226BCE94700BA872A /* Widget Extension */, 29B97323FDCFA39411CA2CEA /* Frameworks */, 19C28FACFE9D520D11CA2CBB /* Products */, - FF8C49921BBC9D140010D894 /* NewsBlur.entitlements */, - 8D1107310486CEB800E47090 /* NewsBlur-iPhone-Info.plist */, ); name = CustomTemplate; sourceTree = ""; @@ -2644,6 +2685,8 @@ 431B857615A132B600DCE497 /* Images */, FFF1E4C717750BDD00BF59D3 /* Settings.bundle */, E1D123FD1C66753D00434F40 /* Localizable.stringsdict */, + FF8C49921BBC9D140010D894 /* App.entitlements */, + 8D1107310486CEB800E47090 /* Info.plist */, ); path = 
Resources; sourceTree = ""; @@ -3696,10 +3739,12 @@ 175790622930605500490924 /* Resources */, 175792252930605500490924 /* Sources */, 175792C12930605500490924 /* Frameworks */, + 17654E4A2B02C0A800F61B2B /* Embed Foundation Extensions */, ); buildRules = ( ); dependencies = ( + 17654E492B02C0A800F61B2B /* PBXTargetDependency */, ); name = "NewsBlur Alpha"; packageProductDependencies = ( @@ -3708,6 +3753,23 @@ productReference = 175792E42930605500490924 /* NB Alpha.app */; productType = "com.apple.product-type.application"; }; + 17654E312B02C08700F61B2B /* NewsBlur Alpha Widget */ = { + isa = PBXNativeTarget; + buildConfigurationList = 17654E422B02C08700F61B2B /* Build configuration list for PBXNativeTarget "NewsBlur Alpha Widget" */; + buildPhases = ( + 17654E322B02C08700F61B2B /* Sources */, + 17654E3B2B02C08700F61B2B /* Frameworks */, + 17654E3E2B02C08700F61B2B /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + ); + name = "NewsBlur Alpha Widget"; + productName = WidgetExtension; + productReference = 17654E452B02C08700F61B2B /* NewsBlur Alpha Widget.appex */; + productType = "com.apple.product-type.app-extension"; + }; 177551D2238E228A00E27818 /* Old Widget Extension */ = { isa = PBXNativeTarget; buildConfigurationList = 177551E2238E228A00E27818 /* Build configuration list for PBXNativeTarget "Old Widget Extension" */; @@ -3864,6 +3926,7 @@ FF8A94961DE3BB77000A4C31 /* Story Notification Service Extension */, 177551D2238E228A00E27818 /* Old Widget Extension */, 173CB30C26BCE94700BA872A /* NewsBlur Widget */, + 17654E312B02C08700F61B2B /* NewsBlur Alpha Widget */, ); }; /* End PBXProject section */ @@ -4349,6 +4412,16 @@ ); runOnlyForDeploymentPostprocessing = 0; }; + 17654E3E2B02C08700F61B2B /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 17654E3F2B02C08700F61B2B /* WhitneySSm-Medium-Bas.otf in Resources */, + 17654E402B02C08700F61B2B /* Assets.xcassets in Resources */, + 17654E412B02C08700F61B2B /* WhitneySSm-Book-Bas.otf in Resources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; 177551D1238E228A00E27818 /* Resources */ = { isa = PBXResourcesBuildPhase; buildActionMask = 2147483647; @@ -5025,6 +5098,21 @@ ); runOnlyForDeploymentPostprocessing = 0; }; + 17654E322B02C08700F61B2B /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + 17654E332B02C08700F61B2B /* WidgetLoader.swift in Sources */, + 17654E342B02C08700F61B2B /* WidgetCache.swift in Sources */, + 17654E352B02C08700F61B2B /* WidgetExtension.swift in Sources */, + 17654E362B02C08700F61B2B /* WidgetStory.swift in Sources */, + 17654E372B02C08700F61B2B /* WidgetBarView.swift in Sources */, + 17654E382B02C08700F61B2B /* WidgetDebugTimer.swift in Sources */, + 17654E392B02C08700F61B2B /* WidgetFeed.swift in Sources */, + 17654E3A2B02C08700F61B2B /* WidgetStoryView.swift in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; 177551CF238E228A00E27818 /* Sources */ = { isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; @@ -5231,6 +5319,11 @@ target = 1749390F1C251BFE003D98AA /* Share Extension */; targetProxy = 174939191C251BFE003D98AA /* PBXContainerItemProxy */; }; + 17654E492B02C0A800F61B2B /* PBXTargetDependency */ = { + isa = PBXTargetDependency; + target = 17654E312B02C08700F61B2B /* NewsBlur Alpha Widget */; + targetProxy = 17654E482B02C0A800F61B2B /* PBXContainerItemProxy */; + }; 177551DE238E228A00E27818 /* PBXTargetDependency */ = { isa = PBXTargetDependency; platformFilter = ios; @@ -5503,7 
+5596,7 @@ ALWAYS_SEARCH_USER_PATHS = YES; ASSETCATALOG_COMPILER_APPICON_NAME = AppIconDev; CLANG_ENABLE_OBJC_ARC = YES; - CODE_SIGN_ENTITLEMENTS = NewsBlur/NewsBlur.entitlements; + CODE_SIGN_ENTITLEMENTS = Resources/App.entitlements; CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; COPY_PHASE_STRIP = NO; @@ -5519,7 +5612,7 @@ GCC_THUMB_SUPPORT = NO; GCC_VERSION = ""; HEADER_SEARCH_PATHS = ""; - INFOPLIST_FILE = "NewsBlur-iPhone-Info.plist"; + INFOPLIST_FILE = Resources/Info.plist; LAUNCH_SCREEN_NAME = LaunchScreenDev; LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks"; LIBRARY_SEARCH_PATHS = ( @@ -5555,7 +5648,7 @@ ALWAYS_SEARCH_USER_PATHS = NO; ASSETCATALOG_COMPILER_APPICON_NAME = AppIconDev; CLANG_ENABLE_OBJC_ARC = YES; - CODE_SIGN_ENTITLEMENTS = NewsBlur/NewsBlur.entitlements; + CODE_SIGN_ENTITLEMENTS = Resources/App.entitlements; CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; COPY_PHASE_STRIP = YES; @@ -5570,7 +5663,7 @@ GCC_THUMB_SUPPORT = NO; GCC_VERSION = ""; HEADER_SEARCH_PATHS = ""; - INFOPLIST_FILE = "NewsBlur-iPhone-Info.plist"; + INFOPLIST_FILE = Resources/Info.plist; LAUNCH_SCREEN_NAME = LaunchScreenDev; LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks"; LIBRARY_SEARCH_PATHS = ( @@ -5598,6 +5691,105 @@ }; name = Release; }; + 17654E432B02C08700F61B2B /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + ASSETCATALOG_COMPILER_WIDGET_BACKGROUND_COLOR_NAME = WidgetBackground; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CODE_SIGN_ENTITLEMENTS = "Widget Extension/WidgetExtension.entitlements"; + CODE_SIGN_IDENTITY = "iPhone Developer"; + "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 146; + DEBUG_INFORMATION_FORMAT = dwarf; + DEVELOPMENT_TEAM = HR7P97SD72; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_DYNAMIC_NO_PIC = NO; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + INFOPLIST_FILE = "Widget Extension/Info.plist"; + IPHONEOS_DEPLOYMENT_TARGET = 17.0; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + "@executable_path/../../Frameworks", + ); + MARKETING_VERSION = 12.1.0; + MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; + MTL_FAST_MATH = YES; + PRODUCT_BUNDLE_IDENTIFIER = "com.newsblur.NB-Alpha.widget"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SKIP_INSTALL = YES; + SUPPORTS_MACCATALYST = YES; + SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2,6"; + }; + name = Debug; + }; + 17654E442B02C08700F61B2B /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + ASSETCATALOG_COMPILER_WIDGET_BACKGROUND_COLOR_NAME = WidgetBackground; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = 
YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CODE_SIGN_ENTITLEMENTS = "Widget Extension/WidgetExtension.entitlements"; + CODE_SIGN_IDENTITY = "iPhone Developer"; + "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 146; + DEVELOPMENT_TEAM = HR7P97SD72; + ENABLE_NS_ASSERTIONS = NO; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + INFOPLIST_FILE = "Widget Extension/Info.plist"; + IPHONEOS_DEPLOYMENT_TARGET = 17.0; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + "@executable_path/../../Frameworks", + ); + MARKETING_VERSION = 12.1.0; + MTL_ENABLE_DEBUG_INFO = NO; + MTL_FAST_MATH = YES; + PRODUCT_BUNDLE_IDENTIFIER = "com.newsblur.NB-Alpha.widget"; + PRODUCT_NAME = "$(TARGET_NAME)"; + SKIP_INSTALL = YES; + SUPPORTS_MACCATALYST = YES; + SWIFT_COMPILATION_MODE = wholemodule; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2,6"; + VALIDATE_PRODUCT = YES; + }; + name = Release; + }; 177551E0238E228A00E27818 /* Debug */ = { isa = XCBuildConfiguration; buildSettings = { @@ -5702,11 +5894,10 @@ ALWAYS_SEARCH_USER_PATHS = YES; ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CLANG_ENABLE_OBJC_ARC = YES; - CODE_SIGN_ENTITLEMENTS = NewsBlur/NewsBlur.entitlements; + CODE_SIGN_ENTITLEMENTS = Resources/App.entitlements; CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; COPY_PHASE_STRIP = NO; - CURRENT_PROJECT_VERSION = 146; DEVELOPMENT_TEAM = HR7P97SD72; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", @@ -5719,14 +5910,13 @@ GCC_THUMB_SUPPORT = NO; GCC_VERSION = ""; HEADER_SEARCH_PATHS = ""; - INFOPLIST_FILE = "NewsBlur-iPhone-Info.plist"; + INFOPLIST_FILE = Resources/Info.plist; LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks"; LIBRARY_SEARCH_PATHS = ( "$(inherited)", "\"$(SRCROOT)\"", "\"$(SRCROOT)/Other Sources\"", ); - MARKETING_VERSION = 12.1.0; OTHER_CPLUSPLUSFLAGS = "$(OTHER_CFLAGS)"; OTHER_LDFLAGS = ( "-lsqlite3.0", @@ -5738,7 +5928,8 @@ PROVISIONING_PROFILE = ""; PROVISIONING_PROFILE_SPECIFIER = ""; STRIP_INSTALLED_PRODUCT = NO; - SUPPORTS_MACCATALYST = YES; + SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; + SUPPORTS_MACCATALYST = NO; SWIFT_OBJC_BRIDGING_HEADER = "Other Sources/BridgingHeader.h"; TARGETED_DEVICE_FAMILY = "1,2,6"; "WARNING_CFLAGS[arch=*]" = "-Wall"; @@ -5752,11 +5943,10 @@ ALWAYS_SEARCH_USER_PATHS = NO; ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; CLANG_ENABLE_OBJC_ARC = YES; - CODE_SIGN_ENTITLEMENTS = NewsBlur/NewsBlur.entitlements; + CODE_SIGN_ENTITLEMENTS = Resources/App.entitlements; CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; COPY_PHASE_STRIP = YES; - CURRENT_PROJECT_VERSION = 146; DEVELOPMENT_TEAM = HR7P97SD72; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", @@ -5768,14 +5958,13 @@ GCC_THUMB_SUPPORT = NO; GCC_VERSION = ""; HEADER_SEARCH_PATHS = ""; - INFOPLIST_FILE = "NewsBlur-iPhone-Info.plist"; + INFOPLIST_FILE = Resources/Info.plist; LD_RUNPATH_SEARCH_PATHS = "@executable_path/Frameworks"; LIBRARY_SEARCH_PATHS = ( "$(inherited)", "\"$(SRCROOT)\"", "\"$(SRCROOT)/Other Sources\"", ); - MARKETING_VERSION 
= 12.1.0; OTHER_LDFLAGS = ( "-lsqlite3.0", "-ObjC", @@ -5785,7 +5974,8 @@ PRODUCT_NAME = NewsBlur; PROVISIONING_PROFILE = ""; PROVISIONING_PROFILE_SPECIFIER = ""; - SUPPORTS_MACCATALYST = YES; + SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; + SUPPORTS_MACCATALYST = NO; SWIFT_OBJC_BRIDGING_HEADER = "Other Sources/BridgingHeader.h"; TARGETED_DEVICE_FAMILY = "1,2,6"; VALIDATE_PRODUCT = YES; @@ -5818,7 +6008,7 @@ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; CODE_SIGN_IDENTITY = "iPhone Developer"; COPY_PHASE_STRIP = NO; - CURRENT_PROJECT_VERSION = 150; + CURRENT_PROJECT_VERSION = 151; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; ENABLE_STRICT_OBJC_MSGSEND = YES; ENABLE_TESTABILITY = YES; @@ -5873,7 +6063,7 @@ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; CODE_SIGN_IDENTITY = "iPhone Developer"; COPY_PHASE_STRIP = NO; - CURRENT_PROJECT_VERSION = 150; + CURRENT_PROJECT_VERSION = 151; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; ENABLE_STRICT_OBJC_MSGSEND = YES; GCC_C_LANGUAGE_STANDARD = "compiler-default"; @@ -6017,6 +6207,15 @@ defaultConfigurationIsVisible = 0; defaultConfigurationName = Release; }; + 17654E422B02C08700F61B2B /* Build configuration list for PBXNativeTarget "NewsBlur Alpha Widget" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + 17654E432B02C08700F61B2B /* Debug */, + 17654E442B02C08700F61B2B /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; 177551E2238E228A00E27818 /* Build configuration list for PBXNativeTarget "Old Widget Extension" */ = { isa = XCConfigurationList; buildConfigurations = ( diff --git a/clients/ios/NewsBlur.xcodeproj/xcshareddata/xcschemes/Alpha Widget Extension.xcscheme b/clients/ios/NewsBlur.xcodeproj/xcshareddata/xcschemes/Alpha Widget Extension.xcscheme new file mode 100644 index 0000000000..1a6ded7639 --- /dev/null +++ b/clients/ios/NewsBlur.xcodeproj/xcshareddata/xcschemes/Alpha Widget Extension.xcscheme @@ -0,0 +1,101 @@ [101 new lines of Xcode scheme XML omitted: the element markup did not survive extraction] diff --git a/clients/ios/NewsBlur/NewsBlur.entitlements b/clients/ios/Resources/App.entitlements similarity index 100% rename from clients/ios/NewsBlur/NewsBlur.entitlements rename to clients/ios/Resources/App.entitlements diff --git a/clients/ios/NewsBlur-iPhone-Info.plist b/clients/ios/Resources/Info.plist similarity index 100% rename from clients/ios/NewsBlur-iPhone-Info.plist rename to clients/ios/Resources/Info.plist diff --git a/clients/ios/Widget Extension/WidgetExtension.swift b/clients/ios/Widget Extension/WidgetExtension.swift index 88ba7c09f9..c04ff68aaa 100644 --- a/clients/ios/Widget Extension/WidgetExtension.swift +++ b/clients/ios/Widget Extension/WidgetExtension.swift @@ -85,6 +85,8 @@ struct SimpleEntry: TimelineEntry { struct WidgetEntryView : View { var entry: Provider.Entry + @Environment(\.widgetRenderingMode) var renderingMode + @Environment(\.widgetContentMargins) var margins @Environment(\.colorScheme) var colorScheme @Environment(\.widgetFamily) private var family @@ -94,14 +96,19 @@ struct WidgetEntryView : View { var body: some View { ZStack { - Color("WidgetBackground") - .ignoresSafeArea() +// switch renderingMode { +// case .accented: +// case .fullColor: +// case .vibrant: +// break +// } if let error = entry.cache.error { Link(destination: URL(string: "newsblurwidget://?error=\(error)")!)
{ Text(message(for: error)) .font(.headline) - .foregroundColor(.secondary) + .foregroundStyle(.secondary) + .containerBackground(.fill, for: .widget) } } else { VStack(alignment: .leading, spacing: 0, content: { @@ -112,9 +119,19 @@ struct WidgetEntryView : View { Divider() } }) - .widgetURL(URL(string: "newsblurwidget://open")) + .padding(.top, 5) + .padding(.bottom, 5) + .containerBackground(for: .widget) { + Color("WidgetBackground") + } + .widgetURL(URL(string: "newsblurwidget://open")) } } +// .environment(\.colorScheme, colorScheme) + .containerBackground(for: .widget) { + Color("WidgetBackground") +// .ignoresSafeArea() + } } func message(for error: WidgetCacheError) -> String { @@ -142,6 +159,7 @@ struct WidgetExtension: Widget { .configurationDisplayName("NewsBlur") .description("The latest stories from NewsBlur.") .supportedFamilies([.systemMedium, .systemLarge]) + .contentMarginsDisabled() } } From b4e9c05623fa9906b58b8fd7257dd4fb60769a2b Mon Sep 17 00:00:00 2001 From: David Sinclair Date: Mon, 13 Nov 2023 20:56:52 -0600 Subject: [PATCH 03/69] #1247 (Mac Catalyst edition) - Widget tweaks --- .../Classes/FeedDetailObjCViewController.m | 2 +- .../ios/NewsBlur.xcodeproj/project.pbxproj | 20 ------------------- 2 files changed, 1 insertion(+), 21 deletions(-) diff --git a/clients/ios/Classes/FeedDetailObjCViewController.m b/clients/ios/Classes/FeedDetailObjCViewController.m index 8c0f1ff452..36e13c5544 100644 --- a/clients/ios/Classes/FeedDetailObjCViewController.m +++ b/clients/ios/Classes/FeedDetailObjCViewController.m @@ -1497,7 +1497,7 @@ - (void)testForTryFeed { // NSIndexPath *indexPath = [NSIndexPath indexPathForRow:locationOfStoryId inSection:0]; NSIndexPath *indexPath = [self indexPathForStoryLocation:locationOfStoryId]; - if (self.isLegacyTable && self.storyTitlesTable.window != nil) { + if (self.isLegacyTable && self.storyTitlesTable.window != nil && indexPath.row < [self.storyTitlesTable numberOfRowsInSection:0]) { [self tableView:self.storyTitlesTable selectRowAtIndexPath:indexPath animated:NO scrollPosition:UITableViewScrollPositionMiddle]; diff --git a/clients/ios/NewsBlur.xcodeproj/project.pbxproj b/clients/ios/NewsBlur.xcodeproj/project.pbxproj index 7048d1f136..edd193a22e 100755 --- a/clients/ios/NewsBlur.xcodeproj/project.pbxproj +++ b/clients/ios/NewsBlur.xcodeproj/project.pbxproj @@ -5408,7 +5408,6 @@ CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; CODE_SIGN_STYLE = Automatic; - CURRENT_PROJECT_VERSION = 146; DEBUG_INFORMATION_FORMAT = dwarf; DEVELOPMENT_TEAM = HR7P97SD72; GCC_C_LANGUAGE_STANDARD = gnu11; @@ -5426,7 +5425,6 @@ "@executable_path/Frameworks", "@executable_path/../../Frameworks", ); - MARKETING_VERSION = 12.1.0; MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; MTL_FAST_MATH = YES; PRODUCT_BUNDLE_IDENTIFIER = com.newsblur.NewsBlur.widget; @@ -5459,7 +5457,6 @@ CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; CODE_SIGN_STYLE = Automatic; - CURRENT_PROJECT_VERSION = 146; DEVELOPMENT_TEAM = HR7P97SD72; ENABLE_NS_ASSERTIONS = NO; GCC_C_LANGUAGE_STANDARD = gnu11; @@ -5471,7 +5468,6 @@ "@executable_path/Frameworks", "@executable_path/../../Frameworks", ); - MARKETING_VERSION = 12.1.0; MTL_ENABLE_DEBUG_INFO = NO; MTL_FAST_MATH = YES; PRODUCT_BUNDLE_IDENTIFIER = com.newsblur.NewsBlur.widget; @@ -5506,7 +5502,6 @@ CODE_SIGN_ENTITLEMENTS = "Share Extension/Share Extension.entitlements"; CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = 
"iPhone Developer"; - CURRENT_PROJECT_VERSION = 146; DEBUG_INFORMATION_FORMAT = dwarf; DEVELOPMENT_TEAM = HR7P97SD72; ENABLE_STRICT_OBJC_MSGSEND = YES; @@ -5529,7 +5524,6 @@ "@executable_path/Frameworks", "@executable_path/../../Frameworks", ); - MARKETING_VERSION = 12.1.0; MTL_ENABLE_DEBUG_INFO = YES; PRODUCT_BUNDLE_IDENTIFIER = "com.newsblur.NewsBlur.Share-Extension"; PRODUCT_NAME = "$(TARGET_NAME)"; @@ -5561,7 +5555,6 @@ CODE_SIGN_ENTITLEMENTS = "Share Extension/Share Extension.entitlements"; CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; - CURRENT_PROJECT_VERSION = 146; DEVELOPMENT_TEAM = HR7P97SD72; ENABLE_NS_ASSERTIONS = NO; ENABLE_STRICT_OBJC_MSGSEND = YES; @@ -5578,7 +5571,6 @@ "@executable_path/Frameworks", "@executable_path/../../Frameworks", ); - MARKETING_VERSION = 12.1.0; MTL_ENABLE_DEBUG_INFO = NO; PRODUCT_BUNDLE_IDENTIFIER = "com.newsblur.NewsBlur.Share-Extension"; PRODUCT_NAME = "$(TARGET_NAME)"; @@ -5711,7 +5703,6 @@ CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; CODE_SIGN_STYLE = Automatic; - CURRENT_PROJECT_VERSION = 146; DEBUG_INFORMATION_FORMAT = dwarf; DEVELOPMENT_TEAM = HR7P97SD72; GCC_C_LANGUAGE_STANDARD = gnu11; @@ -5730,7 +5721,6 @@ "@executable_path/Frameworks", "@executable_path/../../Frameworks", ); - MARKETING_VERSION = 12.1.0; MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; MTL_FAST_MATH = YES; PRODUCT_BUNDLE_IDENTIFIER = "com.newsblur.NB-Alpha.widget"; @@ -5763,7 +5753,6 @@ CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; CODE_SIGN_STYLE = Automatic; - CURRENT_PROJECT_VERSION = 146; DEVELOPMENT_TEAM = HR7P97SD72; ENABLE_NS_ASSERTIONS = NO; GCC_C_LANGUAGE_STANDARD = gnu11; @@ -5776,7 +5765,6 @@ "@executable_path/Frameworks", "@executable_path/../../Frameworks", ); - MARKETING_VERSION = 12.1.0; MTL_ENABLE_DEBUG_INFO = NO; MTL_FAST_MATH = YES; PRODUCT_BUNDLE_IDENTIFIER = "com.newsblur.NB-Alpha.widget"; @@ -5811,7 +5799,6 @@ CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; CODE_SIGN_STYLE = Automatic; - CURRENT_PROJECT_VERSION = 146; DEBUG_INFORMATION_FORMAT = dwarf; DEVELOPMENT_TEAM = HR7P97SD72; GCC_C_LANGUAGE_STANDARD = gnu11; @@ -5829,7 +5816,6 @@ "@executable_path/Frameworks", "@executable_path/../../Frameworks", ); - MARKETING_VERSION = 12.1.0; MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; MTL_FAST_MATH = YES; PRODUCT_BUNDLE_IDENTIFIER = "com.newsblur.NewsBlur.old-widget"; @@ -5863,7 +5849,6 @@ CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; CODE_SIGN_STYLE = Automatic; - CURRENT_PROJECT_VERSION = 146; DEVELOPMENT_TEAM = HR7P97SD72; ENABLE_NS_ASSERTIONS = NO; GCC_C_LANGUAGE_STANDARD = gnu11; @@ -5875,7 +5860,6 @@ "@executable_path/Frameworks", "@executable_path/../../Frameworks", ); - MARKETING_VERSION = 12.1.0; MTL_ENABLE_DEBUG_INFO = NO; MTL_FAST_MATH = YES; PRODUCT_BUNDLE_IDENTIFIER = "com.newsblur.NewsBlur.old-widget"; @@ -6108,7 +6092,6 @@ CODE_SIGN_IDENTITY = "Apple Development"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; CODE_SIGN_STYLE = Automatic; - CURRENT_PROJECT_VERSION = 146; DEBUG_INFORMATION_FORMAT = dwarf; DEVELOPMENT_TEAM = HR7P97SD72; GCC_C_LANGUAGE_STANDARD = gnu99; @@ -6126,7 +6109,6 @@ "@executable_path/Frameworks", "@executable_path/../../Frameworks", ); - MARKETING_VERSION = 12.1.0; MTL_ENABLE_DEBUG_INFO = YES; PRODUCT_BUNDLE_IDENTIFIER = 
"com.newsblur.NewsBlur.Story-Notification-Service-Extension"; PRODUCT_NAME = "$(TARGET_NAME)"; @@ -6153,7 +6135,6 @@ CODE_SIGN_IDENTITY = "Apple Development"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; CODE_SIGN_STYLE = Automatic; - CURRENT_PROJECT_VERSION = 146; DEVELOPMENT_TEAM = HR7P97SD72; ENABLE_NS_ASSERTIONS = NO; GCC_C_LANGUAGE_STANDARD = gnu99; @@ -6165,7 +6146,6 @@ "@executable_path/Frameworks", "@executable_path/../../Frameworks", ); - MARKETING_VERSION = 12.1.0; MTL_ENABLE_DEBUG_INFO = NO; PRODUCT_BUNDLE_IDENTIFIER = "com.newsblur.NewsBlur.Story-Notification-Service-Extension"; PRODUCT_NAME = "$(TARGET_NAME)"; From b4c4aa51473a77267858ef7ed6b55ca4b50af877 Mon Sep 17 00:00:00 2001 From: David Sinclair Date: Tue, 14 Nov 2023 21:48:22 -0600 Subject: [PATCH 04/69] #1247 (Mac Catalyst edition) - Added the user info, with several tweaks to fit. - Getting the avatar to show up was tricky. - Nav bar buttons on the right. --- .../Classes/FeedDetailObjCViewController.m | 14 ++++++++++ clients/ios/Classes/FeedsObjCViewController.h | 3 +++ clients/ios/Classes/FeedsObjCViewController.m | 27 +++++++++++++++++-- .../Classes/StoryPagesObjCViewController.m | 18 +++++++++++++ 4 files changed, 60 insertions(+), 2 deletions(-) diff --git a/clients/ios/Classes/FeedDetailObjCViewController.m b/clients/ios/Classes/FeedDetailObjCViewController.m index 36e13c5544..8884369aa9 100644 --- a/clients/ios/Classes/FeedDetailObjCViewController.m +++ b/clients/ios/Classes/FeedDetailObjCViewController.m @@ -167,6 +167,13 @@ - (void)viewDidLoad { markReadLongPress.delegate = self; [view addGestureRecognizer:markReadLongPress]; +#if TARGET_OS_MACCATALYST + if (@available(macCatalyst 16.0, *)) { + settingsBarButton.hidden = YES; + feedMarkReadButton.hidden = YES; + } +#endif + titleImageBarButton = [UIBarButtonItem alloc]; UILongPressGestureRecognizer *tableLongPress = [[UILongPressGestureRecognizer alloc] @@ -1304,6 +1311,13 @@ - (void)finishedLoadingFeed:(NSDictionary *)results feedPage:(NSInteger)feedPage NSLog(@"finishedLoadingFeed: %@", receivedFeedId); // log +#if TARGET_OS_MACCATALYST + if (@available(macCatalyst 16.0, *)) { + settingsBarButton.hidden = NO; + feedMarkReadButton.hidden = NO; + } +#endif + self.pageFinished = NO; [self renderStories:confirmedNewStories]; diff --git a/clients/ios/Classes/FeedsObjCViewController.h b/clients/ios/Classes/FeedsObjCViewController.h index bcac146ba8..e5f150d21f 100644 --- a/clients/ios/Classes/FeedsObjCViewController.h +++ b/clients/ios/Classes/FeedsObjCViewController.h @@ -62,6 +62,9 @@ UIGestureRecognizerDelegate, UISearchBarDelegate> { @property (nonatomic) IBOutlet UIBarButtonItem * addBarButton; @property (nonatomic) IBOutlet UIBarButtonItem * settingsBarButton; @property (nonatomic) IBOutlet UIBarButtonItem * activitiesButton; +#if TARGET_OS_MACCATALYST +@property (nonatomic) IBOutlet UIBarButtonItem * userBarButton; +#endif @property (nonatomic) IBOutlet UIButton *userAvatarButton; @property (nonatomic) IBOutlet UILabel *neutralCount; @property (nonatomic) IBOutlet UILabel *positiveCount; diff --git a/clients/ios/Classes/FeedsObjCViewController.m b/clients/ios/Classes/FeedsObjCViewController.m index acb602b4a9..33f0bc794d 100644 --- a/clients/ios/Classes/FeedsObjCViewController.m +++ b/clients/ios/Classes/FeedsObjCViewController.m @@ -2863,6 +2863,9 @@ - (void)resetToolbar { } - (void)layoutHeaderCounts:(UIInterfaceOrientation)orientation { +#if TARGET_OS_MACCATALYST + int yOffset = -5; +#else if (!orientation) { orientation = 
self.view.window.windowScene.interfaceOrientation; } @@ -2874,6 +2877,7 @@ - (void)layoutHeaderCounts:(UIInterfaceOrientation)orientation { } int yOffset = isShort ? 0 : 6; +#endif UIView *userInfoView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, self.navigationController.navigationBar.frame.size.width, @@ -2886,9 +2890,19 @@ - (void)layoutHeaderCounts:(UIInterfaceOrientation)orientation { target:self action:@selector((showUserProfile))]; userAvatarButton.pointerInteractionEnabled = YES; userAvatarButton.accessibilityLabel = @"User info"; +#if TARGET_OS_MACCATALYST + userAvatarButton.accessibilityHint = @"Double-click for information about your account."; + CGRect frame = userAvatarButton.frame; + frame.origin.y = -8; + frame.size.width = 38; + frame.size.height = 38; + userAvatarButton.frame = frame; +#else userAvatarButton.accessibilityHint = @"Double-tap for information about your account."; UIEdgeInsets insets = UIEdgeInsetsMake(0, -10, 10, 0); userAvatarButton.contentEdgeInsets = insets; +#endif +// userAvatarButton.backgroundColor = UIColor.blueColor; NSMutableURLRequest *avatarRequest = [NSMutableURLRequest requestWithURL:imageURL]; [avatarRequest addValue:@"image/*" forHTTPHeaderField:@"Accept"]; @@ -2899,8 +2913,11 @@ - (void)layoutHeaderCounts:(UIInterfaceOrientation)orientation { typeof(weakSelf) __strong strongSelf = weakSelf; image = [Utilities roundCorneredImage:image radius:6 convertToSize:CGSizeMake(38, 38)]; image = [image imageWithRenderingMode:UIImageRenderingModeAlwaysOriginal]; - [(UIButton *)strongSelf.userAvatarButton setImage:image forState:UIControlStateNormal]; - + UIButton *button = strongSelf.userAvatarButton; + [button setImage:image forState:UIControlStateNormal]; +#if TARGET_OS_MACCATALYST + strongSelf.appDelegate.feedDetailViewController.navigationItem.leftBarButtonItems = @[[[UIBarButtonItem alloc] initWithCustomView:[UIView new]]]; +#endif } failure:^(NSURLRequest * _Nonnull request, NSHTTPURLResponse * _Nonnull response, NSError * _Nonnull error) { NSLog(@"Could not fetch user avatar: %@", error); }]; @@ -2941,7 +2958,13 @@ - (void)layoutHeaderCounts:(UIInterfaceOrientation)orientation { // userInfoView.backgroundColor = UIColor.blueColor; +#if TARGET_OS_MACCATALYST + self.userBarButton = [[UIBarButtonItem alloc] initWithCustomView:userInfoView]; +// userInfoView.backgroundColor = UIColor.redColor; + self.navigationItem.leftBarButtonItem = self.userBarButton; +#else self.navigationItem.titleView = userInfoView; +#endif } - (void)refreshHeaderCounts { diff --git a/clients/ios/Classes/StoryPagesObjCViewController.m b/clients/ios/Classes/StoryPagesObjCViewController.m index 8d54100d0d..8b3d92c49c 100644 --- a/clients/ios/Classes/StoryPagesObjCViewController.m +++ b/clients/ios/Classes/StoryPagesObjCViewController.m @@ -1225,6 +1225,10 @@ - (void)updatePageWithActiveStory:(NSInteger)location updateFeedDetail:(BOOL)upd [appDelegate.storiesCollection pushReadStory:[appDelegate.activeStory objectForKey:@"story_hash"]]; +#if TARGET_OS_MACCATALYST + self.appDelegate.detailViewController.navigationItem.leftBarButtonItems = @[[[UIBarButtonItem alloc] initWithCustomView:[UIView new]]]; +#endif + if (!self.isPhone) { if (appDelegate.detailViewController.storyTitlesOnLeft) { appDelegate.detailViewController.navigationItem.rightBarButtonItems = [NSArray arrayWithObjects: @@ -1327,6 +1331,13 @@ - (void)setTextButton:(StoryDetailViewController *)storyViewController { fontSettingsButton.enabled = YES; originalStoryButton.enabled = YES; + +#if TARGET_OS_MACCATALYST + if 
(@available(macCatalyst 16.0, *)) { + fontSettingsButton.hidden = NO; + originalStoryButton.hidden = NO; + } +#endif } else { [buttonText setEnabled:NO]; [buttonText setAlpha:.4]; @@ -1335,6 +1346,13 @@ - (void)setTextButton:(StoryDetailViewController *)storyViewController { fontSettingsButton.enabled = NO; originalStoryButton.enabled = NO; + +#if TARGET_OS_MACCATALYST + if (@available(macCatalyst 16.0, *)) { + fontSettingsButton.hidden = YES; + originalStoryButton.hidden = YES; + } +#endif } [buttonSend setBackgroundImage:[[ThemeManager themeManager] themedImage:[UIImage imageNamed:@"traverse_send.png"]] From f316f39806795bbfb57aa3ebdef0a47c51deead2 Mon Sep 17 00:00:00 2001 From: David Sinclair Date: Wed, 15 Nov 2023 21:30:14 -0600 Subject: [PATCH 05/69] #1247 (Mac Catalyst edition) - Migrated to modern window scene support. - Customized the menu bar, with Theme submenu, and new Site and Story menus (some items not implemented yet). --- .../Classes/FeedDetailObjCViewController.h | 10 +- .../Classes/FeedDetailObjCViewController.m | 20 +- clients/ios/Classes/FeedsObjCViewController.h | 2 + clients/ios/Classes/FeedsObjCViewController.m | 11 ++ clients/ios/Classes/NewsBlurAppDelegate.h | 1 + clients/ios/Classes/NewsBlurAppDelegate.m | 30 +-- clients/ios/Classes/SceneDelegate.swift | 20 ++ .../Classes/StoryPagesObjCViewController.h | 5 + .../Classes/StoryPagesObjCViewController.m | 12 +- .../ios/NewsBlur.xcodeproj/project.pbxproj | 17 +- .../xcschemes/NewsBlur Alpha.xcscheme | 2 +- .../xcshareddata/xcschemes/NewsBlur.xcscheme | 2 +- .../xcschemes/Old Widget Extension.xcscheme | 2 +- .../xcschemes/Share Extension.xcscheme | 2 +- ...ry Notification Service Extension.xcscheme | 2 +- .../xcschemes/Widget Extension.xcscheme | 2 +- .../Contents.json | 0 .../add.svg | 0 clients/ios/Resources/Info.plist | 19 ++ .../ios/Resources/MainInterface.storyboard | 175 +++++++++++++++++- 20 files changed, 283 insertions(+), 51 deletions(-) create mode 100644 clients/ios/Classes/SceneDelegate.swift rename clients/ios/NewsBlur/Images.xcassets/{add.imageset => add-item.imageset}/Contents.json (100%) rename clients/ios/NewsBlur/Images.xcassets/{add.imageset => add-item.imageset}/add.svg (100%) diff --git a/clients/ios/Classes/FeedDetailObjCViewController.h b/clients/ios/Classes/FeedDetailObjCViewController.h index 05751935ff..d473053c87 100644 --- a/clients/ios/Classes/FeedDetailObjCViewController.h +++ b/clients/ios/Classes/FeedDetailObjCViewController.h @@ -117,13 +117,15 @@ - (IBAction)doOpenSettingsMenu:(id)sender; - (void)deleteSite; - (void)deleteFolder; -- (void)muteSite; -- (void)openTrainSite; +- (IBAction)muteSite; +- (IBAction)openTrainSite; +- (IBAction)openNotifications:(id)sender; - (void)openNotificationsWithFeed:(NSString *)feedId; -- (void)openRenameSite; +- (IBAction)openStatistics:(id)sender; +- (IBAction)openRenameSite; - (void)showUserProfile; - (void)changeActiveFeedDetailRow; -- (void)instafetchFeed; +- (IBAction)instafetchFeed; - (void)changeActiveStoryTitleCellLayout; - (void)didSelectItemAtIndexPath:(NSIndexPath *)indexPath; - (void)loadFaviconsFromActiveFeed; diff --git a/clients/ios/Classes/FeedDetailObjCViewController.m b/clients/ios/Classes/FeedDetailObjCViewController.m index 8884369aa9..f0bc0e1012 100644 --- a/clients/ios/Classes/FeedDetailObjCViewController.m +++ b/clients/ios/Classes/FeedDetailObjCViewController.m @@ -2824,7 +2824,7 @@ - (void)deleteFolder { }]; } -- (void)muteSite { +- (IBAction)muteSite { [MBProgressHUD hideHUDForView:self.view animated:YES]; MBProgressHUD *HUD = 
[MBProgressHUD showHUDAddedTo:self.view animated:YES]; HUD.labelText = @"Muting..."; @@ -2977,7 +2977,7 @@ - (void)openMoveView:(UINavigationController *)menuNavigationController { [menuNavigationController showViewController:viewController sender:self]; } -- (void)openTrainSite { +- (IBAction)openTrainSite { [appDelegate openTrainSite]; } @@ -2987,15 +2987,27 @@ - (void)toggleHiddenStories { [self reload]; } +- (IBAction)openNotifications:(id)sender { + NSString *feedId = [self.appDelegate.storiesCollection.activeFeed objectForKey:@"id"]; + + [appDelegate openNotificationsWithFeed:feedId]; +} + - (void)openNotificationsWithFeed:(NSString *)feedId { [appDelegate openNotificationsWithFeed:feedId]; } +- (IBAction)openStatistics:(id)sender { + NSString *feedId = [self.appDelegate.storiesCollection.activeFeed objectForKey:@"id"]; + + [appDelegate openStatisticsWithFeed:feedId sender:settingsBarButton]; +} + - (void)openStatisticsWithFeed:(NSString *)feedId { [appDelegate openStatisticsWithFeed:feedId sender:settingsBarButton]; } -- (void)openRenameSite { +- (IBAction)openRenameSite { NSString *title = [NSString stringWithFormat:@"Rename \"%@\"", appDelegate.storiesCollection.isRiverView ? [appDelegate extractFolderName:appDelegate.storiesCollection.activeFolder] : [appDelegate.storiesCollection.activeFeed objectForKey:@"feed_title"]]; NSString *subtitle = (appDelegate.storiesCollection.isRiverView ? @@ -3147,7 +3159,7 @@ - (void)failedMarkAsUnread:(NSDictionary *)params { // called when the user taps refresh button -- (void)instafetchFeed { +- (IBAction)instafetchFeed { NSString *urlString = [NSString stringWithFormat:@"%@/reader/refresh_feed/%@", self.appDelegate.url, diff --git a/clients/ios/Classes/FeedsObjCViewController.h b/clients/ios/Classes/FeedsObjCViewController.h index e5f150d21f..1cc428287c 100644 --- a/clients/ios/Classes/FeedsObjCViewController.h +++ b/clients/ios/Classes/FeedsObjCViewController.h @@ -99,7 +99,9 @@ UIGestureRecognizerDelegate, UISearchBarDelegate> { - (void)didSelectSectionHeader:(UIButton *)button; - (void)didSelectSectionHeaderWithTag:(NSInteger)tag; - (void)selectNextFolderOrFeed; +- (IBAction)reloadFeeds:(id)sender; - (IBAction)selectIntelligence; +- (IBAction)chooseTheme:(id)sender; - (void)markFeedRead:(NSString *)feedId cutoffDays:(NSInteger)days; - (void)markFeedsRead:(NSArray *)feedIds cutoffDays:(NSInteger)days; - (void)markEverythingReadWithDays:(NSInteger)days; diff --git a/clients/ios/Classes/FeedsObjCViewController.m b/clients/ios/Classes/FeedsObjCViewController.m index 33f0bc794d..f1242056e7 100644 --- a/clients/ios/Classes/FeedsObjCViewController.m +++ b/clients/ios/Classes/FeedsObjCViewController.m @@ -2727,6 +2727,10 @@ - (void)refresh:(UIRefreshControl *)refreshControl { } #endif +- (IBAction)reloadFeeds:(id)sender { + [appDelegate reloadFeedsView:NO]; +} + - (void)finishRefresh { self.inPullToRefresh_ = NO; #if !TARGET_OS_MACCATALYST @@ -2856,6 +2860,13 @@ - (void)finishRefreshingFeedList:(NSDictionary *)results feedId:(NSString *)feed }); } +- (IBAction)chooseTheme:(id)sender { + UICommand *command = sender; + NSString *string = command.propertyList; + + [ThemeManager themeManager].theme = string; +} + - (void)resetToolbar { // self.navigationItem.leftBarButtonItem = nil; self.navigationItem.titleView = nil; diff --git a/clients/ios/Classes/NewsBlurAppDelegate.h b/clients/ios/Classes/NewsBlurAppDelegate.h index 0ca8c817b7..c4fb4b5ba0 100644 --- a/clients/ios/Classes/NewsBlurAppDelegate.h +++ b/clients/ios/Classes/NewsBlurAppDelegate.h @@ 
-295,6 +295,7 @@ SFSafariViewControllerDelegate> { - (void)registerDefaultsFromSettingsBundle; - (void)finishBackground; +- (void)prepareViewControllers; - (void)showFirstTimeUser; - (void)showLogin; diff --git a/clients/ios/Classes/NewsBlurAppDelegate.m b/clients/ios/Classes/NewsBlurAppDelegate.m index ce9776de9c..fea6707eb6 100644 --- a/clients/ios/Classes/NewsBlurAppDelegate.m +++ b/clients/ios/Classes/NewsBlurAppDelegate.m @@ -202,25 +202,10 @@ + (instancetype)shared { - (BOOL)application:(UIApplication *)application willFinishLaunchingWithOptions:(NSDictionary *)launchOptions { [self registerDefaultsFromSettingsBundle]; - // CATALYST: this is now handled by the storyboard. -// self.navigationController.delegate = self; -// self.navigationController.viewControllers = [NSArray arrayWithObject:self.feedsViewController]; self.storiesCollection = [StoriesCollection new]; -// if ([[UIDevice currentDevice] userInterfaceIdiom] != UIUserInterfaceIdiomPhond) { -// self.window.rootViewController = self.masterContainerViewController; -// } else { -// self.window.rootViewController = self.navigationController; -// } - - [self prepareViewControllers]; - [self clearNetworkManager]; - [window makeKeyAndVisible]; - - [[ThemeManager themeManager] prepareForWindow:self.window]; - [self createDatabaseConnection]; cachedFavicons = [[PINCache alloc] initWithName:@"NBFavicons"]; @@ -236,8 +221,6 @@ - (BOOL)application:(UIApplication *)application willFinishLaunchingWithOptions: // Uncomment below line to test image caching // [[NSURLCache sharedURLCache] removeAllCachedResponses]; - [feedsViewController view]; - [feedsViewController loadOfflineFeeds:NO]; dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, (unsigned long)NULL), ^(void) { [self setupReachability]; @@ -249,8 +232,6 @@ - (BOOL)application:(UIApplication *)application willFinishLaunchingWithOptions: } }); -// [self showFirstTimeUser]; - return YES; } @@ -424,10 +405,6 @@ - (BOOL)handleShortcutItem:(UIApplicationShortcutItem *)shortcutItem { return handled; } -- (void)buildMenuWithBuilder:(id)builder { - -} - - (void)delayedAddSite { [self.feedsViewController tapAddSite:self]; } @@ -1221,6 +1198,13 @@ - (void)prepareViewControllers { self.firstTimeUserAddNewsBlurViewController = [FirstTimeUserAddNewsBlurViewController new]; [self updateSplitBehavior:NO]; + + [window makeKeyAndVisible]; + + [[ThemeManager themeManager] prepareForWindow:self.window]; + + [feedsViewController view]; + [feedsViewController loadOfflineFeeds:NO]; } - (StoryPagesViewController *)storyPagesViewController { diff --git a/clients/ios/Classes/SceneDelegate.swift b/clients/ios/Classes/SceneDelegate.swift new file mode 100644 index 0000000000..0743d599c6 --- /dev/null +++ b/clients/ios/Classes/SceneDelegate.swift @@ -0,0 +1,20 @@ +// +// SceneDelegate.swift +// NewsBlur +// +// Created by David Sinclair on 2023-11-15. +// Copyright © 2023 NewsBlur. All rights reserved. +// + +import UIKit + +class SceneDelegate: UIResponder, UIWindowSceneDelegate { + let appDelegate: NewsBlurAppDelegate = .shared + + var window: UIWindow? 
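+ // `window` comes pre-populated from MainInterface.storyboard (named by
+ // UISceneStoryboardFile in Info.plist) before the scene connects, so
+ // scene(_:willConnectTo:) below can hand it straight to the shared app
+ // delegate, which still performs window and view controller setup.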
+ + func scene(_ scene: UIScene, willConnectTo session: UISceneSession, options connectionOptions: UIScene.ConnectionOptions) { + appDelegate.window = window + appDelegate.prepareViewControllers() + } +} diff --git a/clients/ios/Classes/StoryPagesObjCViewController.h b/clients/ios/Classes/StoryPagesObjCViewController.h index 307a47ac03..dcc1495b3c 100644 --- a/clients/ios/Classes/StoryPagesObjCViewController.h +++ b/clients/ios/Classes/StoryPagesObjCViewController.h @@ -156,6 +156,11 @@ - (IBAction)tapProgressBar:(id)sender; - (IBAction)toggleTextView:(id)sender; +- (IBAction)toggleStorySaved:(id)sender; +- (IBAction)toggleStoryUnread:(id)sender; +- (IBAction)showTrain:(id)sender; +- (IBAction)showShare:(id)sender; + - (void)finishMarkAsSaved:(NSDictionary *)params; - (BOOL)failedMarkAsSaved:(NSDictionary *)params; - (void)finishMarkAsUnsaved:(NSDictionary *)params; diff --git a/clients/ios/Classes/StoryPagesObjCViewController.m b/clients/ios/Classes/StoryPagesObjCViewController.m index 8b3d92c49c..a49ce3d0d1 100644 --- a/clients/ios/Classes/StoryPagesObjCViewController.m +++ b/clients/ios/Classes/StoryPagesObjCViewController.m @@ -1473,15 +1473,23 @@ - (IBAction)toggleTextView:(id)sender { // [self.appDelegate.feedDetailViewController changedStoryHeight:currentPage.webView.scrollView.contentSize.height]; } -- (void)toggleStorySaved:(id)sender { +- (IBAction)toggleStorySaved:(id)sender { [appDelegate.storiesCollection toggleStorySaved]; } -- (void)toggleStoryUnread:(id)sender { +- (IBAction)toggleStoryUnread:(id)sender { [appDelegate.storiesCollection toggleStoryUnread]; [appDelegate.feedDetailViewController reload]; // XXX only if successful? } +- (IBAction)showTrain:(id)sender { + [self.appDelegate openTrainStory:self.appDelegate.storyPagesViewController.fontSettingsButton]; +} + +- (IBAction)showShare:(id)sender { + [self.appDelegate.storyPagesViewController.currentPage openShareDialog]; +} + - (BOOL)canPerformAction:(SEL)action withSender:(id)sender { if (action == @selector(toggleTextView:) || action == @selector(scrollPageDown:) || diff --git a/clients/ios/NewsBlur.xcodeproj/project.pbxproj b/clients/ios/NewsBlur.xcodeproj/project.pbxproj index edd193a22e..f6f6b05de1 100755 --- a/clients/ios/NewsBlur.xcodeproj/project.pbxproj +++ b/clients/ios/NewsBlur.xcodeproj/project.pbxproj @@ -17,6 +17,8 @@ 170E3CD124F8A664009CE819 /* SplitViewDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 170E3CD024F8A664009CE819 /* SplitViewDelegate.swift */; }; 170E3CD324F8A89B009CE819 /* HorizontalPageViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 170E3CD224F8A89B009CE819 /* HorizontalPageViewController.swift */; }; 170E3CD724F8AB0D009CE819 /* FeedDetailViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 170E3CD624F8AB0D009CE819 /* FeedDetailViewController.swift */; }; + 17150E1E2B05775A004D5309 /* SceneDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 17150E1D2B05775A004D5309 /* SceneDelegate.swift */; }; + 17150E1F2B05775A004D5309 /* SceneDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 17150E1D2B05775A004D5309 /* SceneDelegate.swift */; }; 1715D02B2166B3F900227731 /* PremiumManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 1715D02A2166B3F900227731 /* PremiumManager.m */; }; 171B6FFD25C4C7C8008638A9 /* StoryPagesViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 171B6FFC25C4C7C8008638A9 /* StoryPagesViewController.swift */; }; 1721C9D12497F91A00B0EDC4 /* mute_gray.png in Resources */ = {isa = PBXBuildFile; 
fileRef = 1721C9D02497F91900B0EDC4 /* mute_gray.png */; }; @@ -1429,6 +1431,7 @@ 170E3CD024F8A664009CE819 /* SplitViewDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SplitViewDelegate.swift; sourceTree = ""; }; 170E3CD224F8A89B009CE819 /* HorizontalPageViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = HorizontalPageViewController.swift; sourceTree = ""; }; 170E3CD624F8AB0D009CE819 /* FeedDetailViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FeedDetailViewController.swift; sourceTree = ""; }; + 17150E1D2B05775A004D5309 /* SceneDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SceneDelegate.swift; sourceTree = ""; }; 1715D0292166B3F900227731 /* PremiumManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = PremiumManager.h; sourceTree = ""; }; 1715D02A2166B3F900227731 /* PremiumManager.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = PremiumManager.m; sourceTree = ""; }; 171B6FFC25C4C7C8008638A9 /* StoryPagesViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = StoryPagesViewController.swift; sourceTree = ""; }; @@ -3317,6 +3320,7 @@ 175792E62930611B00490924 /* LaunchScreenDev.xib */, 1D3623240D0F684500981E51 /* NewsBlurAppDelegate.h */, 1D3623250D0F684500981E51 /* NewsBlurAppDelegate.m */, + 17150E1D2B05775A004D5309 /* SceneDelegate.swift */, FFD1D72F1459B63500E46F89 /* BaseViewController.h */, FFD1D7301459B63500E46F89 /* BaseViewController.m */, 17C074941C14C46B00CFCDB7 /* ThemeManager.h */, @@ -3835,8 +3839,9 @@ 29B97313FDCFA39411CA2CEA /* Project object */ = { isa = PBXProject; attributes = { + BuildIndependentTargetsInParallel = YES; LastSwiftUpdateCheck = 1120; - LastUpgradeCheck = 1420; + LastUpgradeCheck = 1500; ORGANIZATIONNAME = NewsBlur; TargetAttributes = { 173CB30C26BCE94700BA872A = { @@ -5091,6 +5096,7 @@ 175792BA2930605500490924 /* OfflineFetchStories.m in Sources */, 175792BB2930605500490924 /* OfflineFetchText.m in Sources */, 175792BC2930605500490924 /* OfflineFetchImages.m in Sources */, + 17150E1F2B05775A004D5309 /* SceneDelegate.swift in Sources */, 175792BD2930605500490924 /* SplitViewDelegate.swift in Sources */, 175792BE2930605500490924 /* Reachability.m in Sources */, 175792BF2930605500490924 /* NBSwipeableCell.m in Sources */, @@ -5290,6 +5296,7 @@ FF855B5E1794B0760098D48A /* OfflineFetchStories.m in Sources */, 17362ADD23639B4E00A0FCCC /* OfflineFetchText.m in Sources */, FF855B611794B0830098D48A /* OfflineFetchImages.m in Sources */, + 17150E1E2B05775A004D5309 /* SceneDelegate.swift in Sources */, 170E3CD124F8A664009CE819 /* SplitViewDelegate.swift in Sources */, FF8D1EA71BAA304E00725D8A /* Reachability.m in Sources */, FFCDD90117F65A71000C6483 /* NBSwipeableCell.m in Sources */, @@ -5623,7 +5630,6 @@ PRODUCT_NAME = "NB Alpha"; PROVISIONING_PROFILE = ""; PROVISIONING_PROFILE_SPECIFIER = ""; - STRIP_INSTALLED_PRODUCT = NO; SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; SUPPORTS_MACCATALYST = YES; SUPPORTS_MAC_DESIGNED_FOR_IPHONE_IPAD = NO; @@ -5911,7 +5917,6 @@ PRODUCT_NAME = NewsBlur; PROVISIONING_PROFILE = ""; PROVISIONING_PROFILE_SPECIFIER = ""; - STRIP_INSTALLED_PRODUCT = NO; SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; SUPPORTS_MACCATALYST = NO; SWIFT_OBJC_BRIDGING_HEADER = "Other Sources/BridgingHeader.h"; @@ -5970,6 +5975,7 @@ isa = XCBuildConfiguration; 
buildSettings = { ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES; + ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; CLANG_ANALYZER_LOCALIZABILITY_NONLOCALIZED = NO; CLANG_ENABLE_MODULES = YES; CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; @@ -5996,6 +6002,7 @@ DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; ENABLE_STRICT_OBJC_MSGSEND = YES; ENABLE_TESTABILITY = YES; + ENABLE_USER_SCRIPT_SANDBOXING = YES; GCC_C_LANGUAGE_STANDARD = "compiler-default"; GCC_NO_COMMON_BLOCKS = YES; GCC_WARN_64_TO_32_BIT_CONVERSION = YES; @@ -6014,7 +6021,6 @@ PROVISIONING_PROFILE = ""; RUN_CLANG_STATIC_ANALYZER = YES; SDKROOT = iphoneos; - STRIP_INSTALLED_PRODUCT = NO; SWIFT_OPTIMIZATION_LEVEL = "-Onone"; SWIFT_VERSION = 5.0; TARGETED_DEVICE_FAMILY = "1,2"; @@ -6025,6 +6031,7 @@ isa = XCBuildConfiguration; buildSettings = { ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES; + ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; CLANG_ANALYZER_LOCALIZABILITY_NONLOCALIZED = NO; CLANG_ENABLE_MODULES = YES; CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; @@ -6050,6 +6057,7 @@ CURRENT_PROJECT_VERSION = 151; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_USER_SCRIPT_SANDBOXING = YES; GCC_C_LANGUAGE_STANDARD = "compiler-default"; GCC_NO_COMMON_BLOCKS = YES; GCC_WARN_64_TO_32_BIT_CONVERSION = YES; @@ -6068,7 +6076,6 @@ OTHER_LDFLAGS = "-ObjC"; PROVISIONING_PROFILE = ""; SDKROOT = iphoneos; - STRIP_INSTALLED_PRODUCT = NO; SWIFT_COMPILATION_MODE = wholemodule; SWIFT_OPTIMIZATION_LEVEL = "-O"; SWIFT_VERSION = 5.0; diff --git a/clients/ios/NewsBlur.xcodeproj/xcshareddata/xcschemes/NewsBlur Alpha.xcscheme b/clients/ios/NewsBlur.xcodeproj/xcshareddata/xcschemes/NewsBlur Alpha.xcscheme index f335651082..ae49d9d5d9 100644 --- a/clients/ios/NewsBlur.xcodeproj/xcshareddata/xcschemes/NewsBlur Alpha.xcscheme +++ b/clients/ios/NewsBlur.xcodeproj/xcshareddata/xcschemes/NewsBlur Alpha.xcscheme @@ -1,6 +1,6 @@ ChronicleSSm-MediumItalic.otf ChronicleSSm-BookItalic.otf + UIApplicationSceneManifest + + UIApplicationSupportsMultipleScenes + + UISceneConfigurations + + UIWindowSceneSessionRoleApplication + + + UISceneConfigurationName + Default Configuration + UISceneDelegateClassName + $(PRODUCT_MODULE_NAME).SceneDelegate + UISceneStoryboardFile + MainInterface + + + + UIApplicationShortcutItems diff --git a/clients/ios/Resources/MainInterface.storyboard b/clients/ios/Resources/MainInterface.storyboard index 5c97a96af6..2691040dcd 100644 --- a/clients/ios/Resources/MainInterface.storyboard +++ b/clients/ios/Resources/MainInterface.storyboard @@ -1,9 +1,9 @@ - + - + @@ -15,7 +15,7 @@ - + @@ -111,7 +111,7 @@ - + @@ -244,6 +244,164 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -257,6 +415,7 @@ + @@ -288,6 +447,7 @@ + @@ -303,6 +463,7 @@ + @@ -346,7 +507,7 @@ - + @@ -415,7 +576,7 @@ - + @@ -430,7 +591,7 @@ - + From 17be33726e379b62ae113114aee89d7e015e60ed Mon Sep 17 00:00:00 2001 From: David Sinclair Date: Wed, 15 Nov 2023 21:31:16 -0600 Subject: [PATCH 06/69] Bumped build number --- clients/ios/NewsBlur.xcodeproj/project.pbxproj | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/clients/ios/NewsBlur.xcodeproj/project.pbxproj b/clients/ios/NewsBlur.xcodeproj/project.pbxproj index 
f6f6b05de1..441fa60ce5 100755 --- a/clients/ios/NewsBlur.xcodeproj/project.pbxproj +++ b/clients/ios/NewsBlur.xcodeproj/project.pbxproj @@ -5998,7 +5998,7 @@ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; CODE_SIGN_IDENTITY = "iPhone Developer"; COPY_PHASE_STRIP = NO; - CURRENT_PROJECT_VERSION = 151; + CURRENT_PROJECT_VERSION = 152; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; ENABLE_STRICT_OBJC_MSGSEND = YES; ENABLE_TESTABILITY = YES; @@ -6054,7 +6054,7 @@ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; CODE_SIGN_IDENTITY = "iPhone Developer"; COPY_PHASE_STRIP = NO; - CURRENT_PROJECT_VERSION = 151; + CURRENT_PROJECT_VERSION = 152; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; ENABLE_STRICT_OBJC_MSGSEND = YES; ENABLE_USER_SCRIPT_SANDBOXING = YES; From ca3feb3e2f86955d9f4c500ec00cd7f86c2e8946 Mon Sep 17 00:00:00 2001 From: David Sinclair Date: Mon, 11 Dec 2023 18:16:18 -0600 Subject: [PATCH 07/69] Build number bump --- clients/ios/NewsBlur.xcodeproj/project.pbxproj | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/clients/ios/NewsBlur.xcodeproj/project.pbxproj b/clients/ios/NewsBlur.xcodeproj/project.pbxproj index 12eaf5da58..a02462b3cf 100755 --- a/clients/ios/NewsBlur.xcodeproj/project.pbxproj +++ b/clients/ios/NewsBlur.xcodeproj/project.pbxproj @@ -5806,7 +5806,7 @@ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; CODE_SIGN_IDENTITY = "iPhone Developer"; COPY_PHASE_STRIP = NO; - CURRENT_PROJECT_VERSION = 151; + CURRENT_PROJECT_VERSION = 152; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; ENABLE_STRICT_OBJC_MSGSEND = YES; ENABLE_TESTABILITY = YES; @@ -5861,7 +5861,7 @@ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; CODE_SIGN_IDENTITY = "iPhone Developer"; COPY_PHASE_STRIP = NO; - CURRENT_PROJECT_VERSION = 151; + CURRENT_PROJECT_VERSION = 152; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; ENABLE_STRICT_OBJC_MSGSEND = YES; GCC_C_LANGUAGE_STANDARD = "compiler-default"; From ac98426596cc14490147d320673d39f1bfa83c48 Mon Sep 17 00:00:00 2001 From: David Sinclair Date: Mon, 11 Dec 2023 22:25:39 -0600 Subject: [PATCH 08/69] #1247 (Mac Catalyst edition) - Updated the About window. - Now has Mac-specific settings. - Split Settings window into multiple panes. 
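
Since each pane's plist carries its own PreferenceSpecifiers, registering
defaults can no longer stop at Root.plist; every pane has to be walked. A
minimal sketch of the idea (hypothetical loop, reusing the settingsBundle
path from registerDefaultsFromSettingsBundle; pane file names as in this
bundle):

    // Collect the DefaultValue entries from each Settings pane and register them.
    for (NSString *name in @[@"Root.plist", @"Story List.plist",
                             @"Appearance.plist", @"Advanced.plist"]) {
        NSString *path = [settingsBundle stringByAppendingPathComponent:name];
        NSDictionary *settings = [NSDictionary dictionaryWithContentsOfFile:path];
        NSMutableDictionary *defaults = [NSMutableDictionary dictionary];

        for (NSDictionary *spec in settings[@"PreferenceSpecifiers"]) {
            NSString *key = spec[@"Key"];

            if (key != nil && spec[@"DefaultValue"] != nil) {
                defaults[key] = spec[@"DefaultValue"];
            }
        }

        // -registerDefaults: merges into the registration domain, so calling
        // it once per pane is safe.
        [[NSUserDefaults standardUserDefaults] registerDefaults:defaults];
    }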
--- clients/ios/Classes/NewsBlurAppDelegate.m | 2 +- .../ios/NewsBlur.xcodeproj/project.pbxproj | 20 +- .../ios/Resources/MainInterface.storyboard | 4 +- clients/ios/Resources/mac/Credits.rtf | 13 + .../mac/Settings.bundle/Advanced.plist | 111 ++++++ .../mac/Settings.bundle/AdvancedTemplate.png | Bin 0 -> 3523 bytes .../mac/Settings.bundle/Appearance.plist | 149 ++++++++ .../Settings.bundle/AppearanceTemplate.png | Bin 0 -> 2409 bytes .../Note, this is the Mac app.txt | 1 + .../Resources/mac/Settings.bundle/Root.plist | 181 ++++++++++ .../Settings.bundle/Story List Template.png | Bin 0 -> 1771 bytes .../mac/Settings.bundle/Story List.plist | 321 ++++++++++++++++++ .../mac/Settings.bundle/en.lproj/Root.strings | Bin 0 -> 546 bytes .../mac/Settings.bundle/theme_dark.png | Bin 0 -> 1160 bytes .../mac/Settings.bundle/theme_dark@2x.png | Bin 0 -> 1755 bytes .../mac/Settings.bundle/theme_light.png | Bin 0 -> 1293 bytes .../mac/Settings.bundle/theme_light@2x.png | Bin 0 -> 2071 bytes 17 files changed, 795 insertions(+), 7 deletions(-) create mode 100644 clients/ios/Resources/mac/Credits.rtf create mode 100644 clients/ios/Resources/mac/Settings.bundle/Advanced.plist create mode 100644 clients/ios/Resources/mac/Settings.bundle/AdvancedTemplate.png create mode 100644 clients/ios/Resources/mac/Settings.bundle/Appearance.plist create mode 100644 clients/ios/Resources/mac/Settings.bundle/AppearanceTemplate.png create mode 100644 clients/ios/Resources/mac/Settings.bundle/Note, this is the Mac app.txt create mode 100644 clients/ios/Resources/mac/Settings.bundle/Root.plist create mode 100644 clients/ios/Resources/mac/Settings.bundle/Story List Template.png create mode 100644 clients/ios/Resources/mac/Settings.bundle/Story List.plist create mode 100644 clients/ios/Resources/mac/Settings.bundle/en.lproj/Root.strings create mode 100644 clients/ios/Resources/mac/Settings.bundle/theme_dark.png create mode 100644 clients/ios/Resources/mac/Settings.bundle/theme_dark@2x.png create mode 100644 clients/ios/Resources/mac/Settings.bundle/theme_light.png create mode 100644 clients/ios/Resources/mac/Settings.bundle/theme_light@2x.png diff --git a/clients/ios/Classes/NewsBlurAppDelegate.m b/clients/ios/Classes/NewsBlurAppDelegate.m index fea6707eb6..4f38bf572b 100644 --- a/clients/ios/Classes/NewsBlurAppDelegate.m +++ b/clients/ios/Classes/NewsBlurAppDelegate.m @@ -458,7 +458,7 @@ - (void)registerDefaultsFromSettingsBundle { return; } - NSString *name = !self.isPhone ? @"Root~ipad.plist" : @"Root.plist"; + NSString *name = !self.isMac && !self.isPhone ? 
@"Root~ipad.plist" : @"Root.plist"; NSDictionary *settings = [NSDictionary dictionaryWithContentsOfFile:[settingsBundle stringByAppendingPathComponent:name]]; NSArray *preferences = [settings objectForKey:@"PreferenceSpecifiers"]; diff --git a/clients/ios/NewsBlur.xcodeproj/project.pbxproj b/clients/ios/NewsBlur.xcodeproj/project.pbxproj index 002f2e8603..d784e940ec 100755 --- a/clients/ios/NewsBlur.xcodeproj/project.pbxproj +++ b/clients/ios/NewsBlur.xcodeproj/project.pbxproj @@ -505,7 +505,7 @@ 1757920A2930605500490924 /* g_icn_textview_black.png in Resources */ = {isa = PBXBuildFile; fileRef = FF83FF0F1FB54691008DAC0F /* g_icn_textview_black.png */; }; 1757920B2930605500490924 /* logo_58.png in Resources */ = {isa = PBXBuildFile; fileRef = FF322234185BC1AA004078AA /* logo_58.png */; }; 1757920C2930605500490924 /* logo_144.png in Resources */ = {isa = PBXBuildFile; fileRef = FFC486A619CA40B700F4758F /* logo_144.png */; }; - 1757920D2930605500490924 /* Settings.bundle in Resources */ = {isa = PBXBuildFile; fileRef = FFF1E4C717750BDD00BF59D3 /* Settings.bundle */; }; + 1757920D2930605500490924 /* Settings.bundle in Resources */ = {isa = PBXBuildFile; fileRef = FFF1E4C717750BDD00BF59D3 /* Settings.bundle */; platformFilter = ios; }; 1757920E2930605500490924 /* menu_icn_preferences.png in Resources */ = {isa = PBXBuildFile; fileRef = FFF1E4C917750D2C00BF59D3 /* menu_icn_preferences.png */; }; 1757920F2930605500490924 /* menu_icn_preferences@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = FFF1E4CA17750D2C00BF59D3 /* menu_icn_preferences@2x.png */; }; 175792102930605500490924 /* checkmark.png in Resources */ = {isa = PBXBuildFile; fileRef = FF855B541794A53A0098D48A /* checkmark.png */; }; @@ -846,6 +846,10 @@ 17EB505D1BE4411E0021358B /* choose_font@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 17EB505B1BE4411E0021358B /* choose_font@2x.png */; }; 17EB50601BE46A900021358B /* FontListViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 17EB505F1BE46A900021358B /* FontListViewController.m */; }; 17EB50621BE46BB00021358B /* FontListViewController.xib in Resources */ = {isa = PBXBuildFile; fileRef = 17EB50611BE46BB00021358B /* FontListViewController.xib */; }; + 17EE11C82B27FA0C00E7C0CC /* Settings.bundle in Resources */ = {isa = PBXBuildFile; fileRef = 17EE11C72B27FA0C00E7C0CC /* Settings.bundle */; }; + 17EE11C92B27FA0C00E7C0CC /* Settings.bundle in Resources */ = {isa = PBXBuildFile; fileRef = 17EE11C72B27FA0C00E7C0CC /* Settings.bundle */; platformFilter = maccatalyst; }; + 17EE11CE2B28011D00E7C0CC /* Credits.rtf in Resources */ = {isa = PBXBuildFile; fileRef = 17EE11CD2B28011D00E7C0CC /* Credits.rtf */; }; + 17EE11CF2B28011D00E7C0CC /* Credits.rtf in Resources */ = {isa = PBXBuildFile; fileRef = 17EE11CD2B28011D00E7C0CC /* Credits.rtf */; }; 17F156711BDABBF60092EBFD /* safari_shadow@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 17F156701BDABBF60092EBFD /* safari_shadow@2x.png */; }; 17F363F2238E417300D5379D /* WidgetExtensionViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 17F363EF238E417300D5379D /* WidgetExtensionViewController.swift */; }; 17F39EAA264754CD004B46D1 /* image_preview_large_left.png in Resources */ = {isa = PBXBuildFile; fileRef = 17F39EA6264754CC004B46D1 /* image_preview_large_left.png */; }; @@ -1629,6 +1633,8 @@ 17EB505E1BE46A900021358B /* FontListViewController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = FontListViewController.h; sourceTree = ""; }; 
17EB505F1BE46A900021358B /* FontListViewController.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = FontListViewController.m; sourceTree = ""; }; 17EB50611BE46BB00021358B /* FontListViewController.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; name = FontListViewController.xib; path = Classes/FontListViewController.xib; sourceTree = SOURCE_ROOT; }; + 17EE11C72B27FA0C00E7C0CC /* Settings.bundle */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.plug-in"; name = Settings.bundle; path = mac/Settings.bundle; sourceTree = ""; }; + 17EE11CD2B28011D00E7C0CC /* Credits.rtf */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.rtf; name = Credits.rtf; path = mac/Credits.rtf; sourceTree = ""; }; 17F156701BDABBF60092EBFD /* safari_shadow@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "safari_shadow@2x.png"; sourceTree = ""; }; 17F363EF238E417300D5379D /* WidgetExtensionViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = WidgetExtensionViewController.swift; sourceTree = ""; }; 17F39EA6264754CC004B46D1 /* image_preview_large_left.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = image_preview_large_left.png; sourceTree = ""; }; @@ -2681,12 +2687,14 @@ children = ( 17A396D824F86A8F0023C9E2 /* MainInterface.storyboard */, FF191E4E18A323F400473252 /* Images.xcassets */, + 17EE11CD2B28011D00E7C0CC /* Credits.rtf */, 430C4BBE15D7208000B9F63B /* FTUX */, 431B857815A132C500DCE497 /* js */, 431B857715A132BE00DCE497 /* css */, 1753696F1BE535CF00904D00 /* fonts */, 431B857615A132B600DCE497 /* Images */, FFF1E4C717750BDD00BF59D3 /* Settings.bundle */, + 17EE11C72B27FA0C00E7C0CC /* Settings.bundle */, E1D123FD1C66753D00434F40 /* Localizable.stringsdict */, FF8C49921BBC9D140010D894 /* App.entitlements */, 8D1107310486CEB800E47090 /* Info.plist */, @@ -3963,10 +3971,12 @@ buildActionMask = 2147483647; files = ( 175790632930605500490924 /* ChronicleSSm-Book.otf in Resources */, + 1757920D2930605500490924 /* Settings.bundle in Resources */, 175790642930605500490924 /* WhitneySSm-Book-Bas.otf in Resources */, 175790652930605500490924 /* ChronicleSSm-BookItalic.otf in Resources */, 175790662930605500490924 /* WhitneySSm-Medium-Bas.otf in Resources */, 175790672930605500490924 /* icons8-stack-of-paper-100.png in Resources */, + 17EE11C92B27FA0C00E7C0CC /* Settings.bundle in Resources */, 175790682930605500490924 /* WhitneySSm-MediumItalic-Bas.otf in Resources */, 175790692930605500490924 /* WhitneySSm-BookItalic-Bas.otf in Resources */, 1757906A2930605500490924 /* barbutton_sort_desc@3x.png in Resources */, @@ -4174,6 +4184,7 @@ 175791342930605500490924 /* g_icn_greensun@2x.png in Resources */, 175791352930605500490924 /* ak-icon-infrequent.png in Resources */, 175791362930605500490924 /* nav_icn_add.png in Resources */, + 17EE11CF2B28011D00E7C0CC /* Credits.rtf in Resources */, 175791372930605500490924 /* icons8-pyramids-100.png in Resources */, 175791382930605500490924 /* ak-icon-global.png in Resources */, 175791392930605500490924 /* content_preview_large@2x.png in Resources */, @@ -4390,7 +4401,6 @@ 1757920A2930605500490924 /* g_icn_textview_black.png in Resources */, 1757920B2930605500490924 /* logo_58.png in Resources */, 1757920C2930605500490924 /* logo_144.png in Resources */, - 1757920D2930605500490924 /* Settings.bundle in Resources */, 1757920E2930605500490924 /* menu_icn_preferences.png in 
Resources */, 1757920F2930605500490924 /* menu_icn_preferences@2x.png in Resources */, 175792102930605500490924 /* checkmark.png in Resources */, @@ -4488,6 +4498,7 @@ 433323B9158901A40025064D /* fountain_pen@2x.png in Resources */, 433323BB158901C10025064D /* login_background.png in Resources */, FF83FF151FB54693008DAC0F /* g_icn_lightning.png in Resources */, + 17EE11CE2B28011D00E7C0CC /* Credits.rtf in Resources */, 17BE5A761C5DDA8C0075F92C /* barbutton_sort_desc@2x.png in Resources */, 433323BE1589022C0025064D /* user.png in Resources */, 433323BF1589022C0025064D /* user@2x.png in Resources */, @@ -4693,6 +4704,7 @@ 17EB505D1BE4411E0021358B /* choose_font@2x.png in Resources */, FFC5F30C16E2D2C2007AC72C /* story_share_appnet_active@2x.png in Resources */, FFC5F30D16E2D2C2007AC72C /* story_share_appnet.png in Resources */, + 17EE11C82B27FA0C00E7C0CC /* Settings.bundle in Resources */, FFC5F30E16E2D2C2007AC72C /* story_share_appnet@2x.png in Resources */, 1740C6A11C1110BA005EA453 /* theme_color_medium-sel@2x.png in Resources */, 17876BA01C9911D40055DD15 /* g_icn_folder_sm.png in Resources */, @@ -6002,7 +6014,7 @@ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; CODE_SIGN_IDENTITY = "iPhone Developer"; COPY_PHASE_STRIP = NO; - CURRENT_PROJECT_VERSION = 152; + CURRENT_PROJECT_VERSION = 153; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; ENABLE_STRICT_OBJC_MSGSEND = YES; ENABLE_TESTABILITY = YES; @@ -6058,7 +6070,7 @@ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; CODE_SIGN_IDENTITY = "iPhone Developer"; COPY_PHASE_STRIP = NO; - CURRENT_PROJECT_VERSION = 152; + CURRENT_PROJECT_VERSION = 153; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; ENABLE_STRICT_OBJC_MSGSEND = YES; ENABLE_USER_SCRIPT_SANDBOXING = YES; diff --git a/clients/ios/Resources/MainInterface.storyboard b/clients/ios/Resources/MainInterface.storyboard index 2691040dcd..f7dae1270f 100644 --- a/clients/ios/Resources/MainInterface.storyboard +++ b/clients/ios/Resources/MainInterface.storyboard @@ -1,9 +1,9 @@ - + - + diff --git a/clients/ios/Resources/mac/Credits.rtf b/clients/ios/Resources/mac/Credits.rtf new file mode 100644 index 0000000000..bd43258cf3 --- /dev/null +++ b/clients/ios/Resources/mac/Credits.rtf @@ -0,0 +1,13 @@ +{\rtf1\ansi\ansicpg1252\cocoartf2758 +\cocoatextscaling0\cocoaplatform0{\fonttbl\f0\fswiss\fcharset0 Helvetica;} +{\colortbl;\red255\green255\blue255;} +{\*\expandedcolortbl;;} +\margl1440\margr1440\vieww28800\viewh21020\viewkind0 +\pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\qc\partightenfactor0 +{\field{\*\fldinst{HYPERLINK "https://www.newsblur.com/"}}{\fldrslt +\f0\fs22 \cf0 newsblur.com}} +\f0\fs22 \ +\ +{\field{\*\fldinst{HYPERLINK "https://newsblur.com/privacy"}}{\fldrslt Privacy Policy}} \'95 {\field{\*\fldinst{HYPERLINK "https://newsblur.com/tos"}}{\fldrslt Terms of Service}}\ +\ +Copyright NewsBlur, Inc.} \ No newline at end of file diff --git a/clients/ios/Resources/mac/Settings.bundle/Advanced.plist b/clients/ios/Resources/mac/Settings.bundle/Advanced.plist new file mode 100644 index 0000000000..c2387232d9 --- /dev/null +++ b/clients/ios/Resources/mac/Settings.bundle/Advanced.plist @@ -0,0 +1,111 @@ + + + + + PreferenceSpecifiers + + + Title + Offline stories + Type + PSGroupSpecifier + + + Type + PSToggleSwitchSpecifier + Title + Download stories + Key + offline_allowed + DefaultValue + + + + Type + PSToggleSwitchSpecifier + Title + Download text + Key + offline_text_download + DefaultValue + + + + Type + PSToggleSwitchSpecifier + Title + Download 
images + Key + offline_image_download + DefaultValue + + + + FooterText + More stories take more disk space, but otherwise storing more stories has no noticeable effect on performance. + Type + PSMultiValueSpecifier + Title + Store + Titles + + 100 stories + 500 stories + 1,000 stories + 2,000 stories + 5,000 stories + 10,000 stories + + DefaultValue + 1000 + Values + + 100 + 500 + 1000 + 2000 + 5000 + 10000 + + Key + offline_store_limit + + + Type + IASKButtonSpecifier + Title + Delete offline stories... + Key + offline_cache_empty_stories + + + Title + Custom Domain + FooterText + Leave blank to use the NewsBlur site, or enter the URL of your self-hosted NewsBlur installation, e.g. "https://www.domain.com". Takes effect next time app is opened. + Type + PSGroupSpecifier + + + Type + PSTextFieldSpecifier + KeyboardType + URL + AutocorrectionType + No + DefaultValue + + AutocapitalizationType + None + Title + URL + Key + custom_domain + + + StringsTable + Root + Icon + AdvancedTemplate + + diff --git a/clients/ios/Resources/mac/Settings.bundle/AdvancedTemplate.png b/clients/ios/Resources/mac/Settings.bundle/AdvancedTemplate.png new file mode 100644 index 0000000000000000000000000000000000000000..7cff794d5fda87b57f1bbb8fdc4a16fc50101c9b GIT binary patch literal 3523 zcmZ`+2{@E%8=f&^ELoE%I>u7`iZL_RVMvxz_AQP*22*Bfj6KVk>`O&I5|YqzI!KwE zgc*MkiE-Lc7~7#^J=T$fe})#vb^5OBec$JMp8I~5_x+yty1o=!>*K+^0Qd{{9%tn~Cpd%Sb!#;b ziooInpjH?kGSt|YK*B?H)HT(S0E7ktiGm|ga5xmfnUN?h9RL7S4E&x|G0!*8+Qp!6 zGPf3hoge!!XAvNta190kc1dst5RhNMNe{G&b#x)SSXrWc2{?5hKf)P|dN_{AWdRJr zQ5+G6A^SkXaY6WCR5)5;6M^E$Ts2Gqx(OklLo2vg*+NYSBn(trT|-?%!B7wig&L6j z{89F%=3nTXH?%?^nM_2%U}0fl>S0KA0x19n*VEI3X&_(-gc=8-794>m`-H3EgB7=& zeD!0B3HBvniDWDR59RvxIYS5`qZJgmfxbOkd6KdI-y`9Jzm&x(5XQB@;OZK%f1_c- zu|J@3EkC4j_UKz#{|FZDL);>5ZpK!Q2HXr#CIlRjgb5Dj)M5xX*v#r5y6-h@;q5WO zgrE>^=}veo*%0xCvO~R-e#N^iMI_T zkO`bGhlKUDz>_f~&L(_ee@*Ylh;6>O$APlNhGT+UOtCl&esdqSG!09MAp{Xf z4n!Yc3^(UZ$~M(b^kx{>?_o4KnSKwm&9IHg`65szBu)<#NR9+Tkl_!y+5ol_xWk7P z!4K;nVd_K17;-NFH8?^I4tM-|0c_E?VZN{pVBGfD)-GGUxT)ndqoE*&__eVO1>0hJ z`Z?Df#lqCsF_8CFo^X|ulVnRoF~!^^CPOZ{RFUGC`1=X03$)5VJy8M;)xvm>Qv zvWDAtDG4hca@NSN=rR>jZ9Umsh(}V6=`B7fy z{)m0wd+yizsH5yn}M2@ `&+FBA9u{`$byMykrxzDT ^o7rQG)^NTXR{y_gf{pW?1spvdU e~Qi3$W`QrVDk9vd{cu)8?lo$5C=*Vu-D0nv|$vc8&*jc*JA45%z$72wW|D7-tXuD zWYoC+;`ATmR{(ukW9Iu}(7O5bL`D(_7TEf>RyUi?SeI=#X4?xOuB7uym~@F*GiBn& zytQ;CWnHHE`=rGJF!+RM(aDMIPB|9-b%9D42*mIcn{vOduX&XCTtSlHmlz8fN|&sg za8s$RIh%~CyqWAYkvRGAi1OqII%H2#CsYnBtjM?Ic_)1DTw0yugFCR#xgYZ#$Tc4~ zAX<62IgeT-%$>oek!aNB!7x7U?4#yaqp`BrQVH}MR@DN}^e25;+1@9{2R}RQ4rzb! zn&KrQYdw?^?u--w8LO^vU!E=jribQ`M|Oo*8VDi?v;by1n5c%;+9GK zpPz?j4vg{`$u+L-8g(yibQbiK$MmKTA>Twj4pPZIkcUL=lNBw9=uBP+Bg$s5-AP0f zpLs?mZN}?CECp4hHz6zRcBf!mjyRBZ>Mz&3A~!(Z0f5u9+7l&pd@HuzLKAVP?Tw%r zJT}xW7C+OxFllfx1rinSX6#onoZ3GgJ>L9M=|cR*9ycrVvX6pI>rNwikM6s+t^n5<^Qft}Ql;l5F+f4(rhh zdcYMVONRMy-biA~{c7){zPnw}xhdW?%!}KUq+O_idlBVkMGjTJl?;HD<6E*k3*Bfr zwx0_nU+O=8m;Z_wJK1r;qp3k^wtd7jLT8r03fFnc{Drt}=sU-f3Epywop)UWa|Ng4 z(cg8_d2HgU;&f+%;n-b!UFjEq45HWH4=FTlqI@^HvU@OC`pIDmBegQ_hz1+X5~*6J z+KIYuj1x@9!x9|^QrJ#sTi$lab&n#olM-K{exG1zd-tl%Oa=FJiWYY!LvAakpL*^s zBi`;TVFJ8yvcB$!L(9XnOY`X~+Gqi{(DlnTG9wzSC4_$dn!#&U>!{`0|B? 
z&}s8$>P7#X2=pyVFqO0TSgHlCTdQaudt1?Z8NP?0XbJp9FjBv6zpSX|sYt#Y z@r-@l@^{B0^u+T7sFQDu)tgPo9jH4h1C}>)B0f)_+8dQw|ko8QXq66)O$^Lm@uAD75O^|%}^$36kA8LBe{dIikiVx1@3 z9PB&Fwz(?*A)F`NXVo-nH@@Dr%7L#c@t=S) zzyb_%s)3HvEMU}w-v=m7s#evF@~mVo+CalnV$ys(wPxa@7{lJYOIGE~^4*0EW=Svk z?+F_kgiP}m^!eoc<(e6dE)gh)F&lQTLBg+8ryL!$u9a9dG4s9s0^BdCU{W+U05*;u zC_m?*kDHzDvX-T(=w!zlhAhb<>n*R*Bje`j51c5)Gs5bSo|DQC#M|&Va=kHt-&lc=ei&yW7F{ z=x#$ID%xACtCRI4`$ZVVUC bPRzk3l%c + + + + PreferenceSpecifiers + + + Title + Text Size + Type + PSGroupSpecifier + + + Type + PSToggleSwitchSpecifier + Title + Use system size + Key + use_system_font_size + DefaultValue + + + + Type + PSMultiValueSpecifier + Title + Feed and story list + Titles + + Extra small + Small + Medium + Large + Extra Large + + DefaultValue + medium + Values + + xs + small + medium + large + xl + + Key + feed_list_font_size + + + Type + PSMultiValueSpecifier + Title + Story detail + Titles + + Extra small + Small + Medium + Large + Extra Large + + DefaultValue + medium + Values + + xs + small + medium + large + xl + + Key + story_font_size + + + Title + Theme + Type + PSGroupSpecifier + + + Type + PSToggleSwitchSpecifier + Title + Follow system appearance + Key + theme_follow_system + DefaultValue + + + + Type + PSMultiValueSpecifier + Title + Current theme + Titles + + Light + Sepia + Medium + Dark + + DefaultValue + light + Values + + light + sepia + medium + dark + + Key + theme_style + + + Title + App badge + Type + PSGroupSpecifier + + + Type + PSMultiValueSpecifier + Title + Show unread count + Titles + + Off + Unread + Focus + Focus only + + DefaultValue + off + Values + + off + unread + focus + + Key + app_unread_badge + + + StringsTable + Root + Icon + AppearanceTemplate + + diff --git a/clients/ios/Resources/mac/Settings.bundle/AppearanceTemplate.png b/clients/ios/Resources/mac/Settings.bundle/AppearanceTemplate.png new file mode 100644 index 0000000000000000000000000000000000000000..6e78a2a52b3df0c96d644f04ee4bc0d83d9b9c92 GIT binary patch literal 2409 zcmeAS@N?(olHy`uVBq!ia0vp^IzTMT!3HERF6!e1QjEnx?oJHr&dIz4a;~ICc&7RK zGH3zW91I+cYz)jGB@9eJLI}!c1hS}lsQ<<5UuHc)RSfb#VoL`iuV6JDPXUt$^U}S7%Xk=w* zs9ZUIOagH44MkeQoWlBiITo0C^;Rbi_HR$&EXgM{^! z6u?SKvTc!Ep>C3Z$oEoj2% zx*`zrcIGfW$b#q^BC%;e7DU(JUzC{&j4hDE;Ql}sLsuV=kzbMzOgTlF$)0&7sYSp* zfa-=h54Q>=vp~^d6_8nxniJuYnVXsi4o4Fc8-1)2=o+2#bMlLV3lfu4L2d?XK@vh& z4Yno{VU3X`vNcE&NE(4D!pgY_n1}O=Lh|!->~Q328+~+zXlD54r(~v8x+IpQ+JOoH zT|*;XL&FeQ0RYjCqz0sZnbD35s1cU2?YLwOnP)OEu&8^wIEF-U z9-V%-J1kV>So)b0Qv{cK@Y+kbDyn38UFMn?C_U@qDarS{xSRelowPr&NFn8dbn`Be zo)1aK`j-ic^a!2e4s?;wTA6TSg32EcP1*YMGxzqN-+g}G_uBMDGkIUX**b6Qw|nO! 
z)8Bu6w|!}9+pQZnBBVbV8uirlt;uWi$~JcV@6N+wpLowa=Uw2Jb=R(4tLIt0Vzbr> z6NU-3lTxcDEuDAl-n0bKVmFS%k9?m@fbgSR!x0lpIYWRx9pHW z6gRhh!u#a=9A~C{-QG0s?XtNBIySqNWj1jqI3HVg{Y#hC+Ln{=78~41)M0C~%ebNS zXrA$Ix3%6g8uxnGZ<3A-FfPhECf0Ve^4(&2?#q81)^8HK-4Z!(f&ojp;V__SXFq0z#V~+WnlJb!O$Q z-N%}eOAS^8lybah4TwMdy@oNIqq$7cuzUH*7LWQ+mwtPjV7BX_f3LjGuASVSwl@1g z`q}4?xh&joaJnvf-f=A@OMlYFJOAXqtNAB|R{sqvV@fjY_AE)P`W9~VuabY2Vo$L7 zGG4_8W`8ALCN@57SiO4B&YwYBg%z&F{hSjZrTtrF(R!)qvr(IsrK&6+%SN(0FSj&H z{rxuK&6B6!9*2L-zf$9d+=H&(b;dy1OCa{9olD!LPBEzuQvb~2p0p*dd{zAF!Y@mYW+nV%hL(_Gi~~C+B3o{X2>EckxEco{zH+*mK*xWx4vq zKe=tUp-Ygj`j@1IPvyHCr?XGr<~q%wVpl5Lp&4nr-*a3Ezc@4Sa*b-@{F)A*H=0gH z{P*@S|0pPU>2$^Kk0?K@_xru#mn8bnzdUk3dEVk5_c;HRt+nkw9@OyF%;V|mIP3o6 zD@-->-F674CYrop_nF@_tMB#Idl#ClQnKn5OT9(&*G+z*>Tha!?~qA*)ccBXoBvNE zr>GpB6%_f}s8Lz$j`AUE9t+P2Vi{NbA{z2S^j)Pt?K)lc*7oq6>x=8T_NMRjZ4tkz zvP;)AN8rN3KS!C8|B5Yfekp06R2Uq{E*YX0*ut-{U#VU3k-6%EUAg*kvSzu8fBzl- vCfrq*ux0O-8=5Jnc>U@PD&!ZfFZsxL|D2*=6 + + + + PreferenceSpecifiers + + + Type + PSChildPaneSpecifier + Title + Story List + File + Story List + + + Type + PSChildPaneSpecifier + Title + Appearance + File + Appearance + + + Type + PSChildPaneSpecifier + Title + Advanced + File + Advanced + + + Title + Feed list + Type + PSGroupSpecifier + + + Type + PSMultiValueSpecifier + Title + Feed list order + Titles + + Alphabetical + Most used then alphabetical + + DefaultValue + title + Values + + title + usage + + Key + feed_list_sort_order + + + Type + PSToggleSwitchSpecifier + Title + Show feeds after being read + Key + show_feeds_after_being_read + DefaultValue + + + + Type + PSMultiValueSpecifier + Title + When opening app + WantUpdate + + Titles + + Show feed list + Open All Stories + + DefaultValue + everything + Values + + feeds + everything + + Key + app_opening + + + Title + Reading Stories + Type + PSGroupSpecifier + + + Type + PSToggleSwitchSpecifier + Title + Scroll horizontally + Key + scroll_stories_horizontally + DefaultValue + + + + Type + PSToggleSwitchSpecifier + Title + Show public comments + Key + show_public_comments + DefaultValue + + + + Type + PSMultiValueSpecifier + Title + Default browser + Titles + + In-app browser + Safari + Chrome + Opera Mini + Firefox + Edge + Brave + + DefaultValue + inapp + Values + + inapp + safari + chrome + opera_mini + firefox + edge + brave + + Key + story_browser + + + FooterText + This setting only applies after the app is restarted. 
+ Type + PSMultiValueSpecifier + Title + Play videos + Titles + + Inline + Full-screen + + DefaultValue + inline + Values + + inline + fullscreen + + Key + video_playback + + + Type + PSToggleSwitchSpecifier + Title + Show autoscroll + DefaultValue + + Key + story_autoscroll + + + StringsTable + Root + + diff --git a/clients/ios/Resources/mac/Settings.bundle/Story List Template.png b/clients/ios/Resources/mac/Settings.bundle/Story List Template.png new file mode 100644 index 0000000000000000000000000000000000000000..3c91f796208e705294fd1208a23e572e0d34626c GIT binary patch literal 1771 zcmeAS@N?(olHy`uVBq!ia0vp^>Od^Z!3HEXol5@!DaPU;cPEB*=VV?2Iag96JkxxA z8MJ_G4h9ZJHU?&p5(Xw9Ap~VJ0@+dwtYC2lhCD`TIJ=Bd14+FhNEd?y5Z5spf!QKJ z_O$j)1{SE^RX`d9Jb)NzDui~+sm#nvSMW_uEKzVw&M(SSFxNBDGiER{Ffz6>G_o=@ zR4@XPmR6=l3=GT*7!j^ozyvp0eE~C^Eeo=u{!kbvkm4-xh%9Dc;1&j9Muu5)fX-w7 zof#5Q65;D(m7Jfemza{Dl&V*eTL99lF>KRGtkGTEwv&Ohc=K5PHn(2f_n?avWmn4 zh$e7gK-_2pa)Fg|er`cgYH=~pFLs7DU{|4tA^ZeUACy|0pHm9*dT3r|iJcKt3z{&x zt_Xy@ojFVovLL#KNNgIA1<^J57iFdbV+-UkxId7^(A5WIt0zEKqb<1!PvF=0vz;=BDO>!_nB%MjxvLx<=>xocyBTg2d!hkek6;kc7}x zgRO}~SYw1@4Uz;lP)U{PRZwBrJ5gk@|yF0&s^7Qg~%rKgKyNCfBG z>F2dw0!5D7+RxI7n9#c-bJNC!IXNyO+^L6eh3QOb-4v+ixpCXnO`E=ppEx#UR>X{) z92`+WZilF07u80opX-Wzlol@Bbv=t%u}}TbC^z{_L26obH{hcPLgjw z(QEqm<(iLzQlWXA%akuLs^m;Q75gyw*e+I~U19QR3M-Gf2#6n4lK65^#=?o!i8X31 zQ`K`W8LzdP@7De>zPIy!n#GE$G)W!vBR-FRA1GArlVyLATEOsyqx$XI?*gl;I`;aTZR&Tenbje%ulKsK&Rr|lpp55?+C%R}(J)<9x zzxL>D=NJan3sdFeZ&-#Xge)l4UQnLb@OY}@rReXK&+Rt#ow@aHO%0!Ab6b9=b?Rr$ z?gx`PbG5^MOR3qh+wC{Z%8H-7Z9-4P<>CX&4GR0b?#y+YwsYgMbq9BEJ$8J + + + + PreferenceSpecifiers + + + Type + PSMultiValueSpecifier + Title + Story order + Titles + + Newest first + Oldest first + + DefaultValue + newest + Values + + newest + oldest + + Key + default_order + + + Type + PSMultiValueSpecifier + Title + Stories in a folder + Titles + + All stories + Unread only + + DefaultValue + unread + Values + + all + unread + + Key + default_folder_read_filter + + + Type + PSMultiValueSpecifier + Title + Stories in a site + Titles + + All stories + Unread only + + DefaultValue + all + Values + + all + unread + + Key + default_feed_read_filter + + + FooterText + The mark read options are always available via a long press on the mark read button in the stories list. + Type + PSMultiValueSpecifier + Title + Confirm mark read + Titles + + On folders and sites + On folders only + Never + + DefaultValue + folders + Values + + all + folders + never + + Key + default_confirm_read_filter + + + Type + PSMultiValueSpecifier + Title + After mark read + Titles + + Open the next site/folder + Stay on the feeds list + + DefaultValue + next + Values + + next + stay + + Key + after_mark_read + + + Type + PSMultiValueSpecifier + Title + When opening a site + Titles + + Open first story + Show stories + + DefaultValue + story + Values + + story + list + + Key + feed_opening + + + FooterText + You can change this setting in the Infrequent Site Stories view. 
+ Type + PSMultiValueSpecifier + Title + Infrequent stories per month + Titles + + < 5 stories/month + < 15 stories/month + < 30 stories/month + < 60 stories/month + < 90 stories/month + + DefaultValue + 30 + Values + + 5 + 15 + 30 + 60 + 90 + + Key + infrequent_stories_per_month + + + Type + PSToggleSwitchSpecifier + Title + Show Infrequent Site Stories + DefaultValue + YES + Key + show_infrequent_site_stories + + + Type + PSToggleSwitchSpecifier + Title + Show Global Shared Stories + DefaultValue + YES + Key + show_global_shared_stories + + + Type + PSToggleSwitchSpecifier + Title + Mark stories read on scroll + DefaultValue + YES + Key + default_scroll_read_filter + + + Type + PSToggleSwitchSpecifier + Title + Show override mark read on scroll + DefaultValue + YES + Key + override_scroll_read_filter + + + Type + PSMultiValueSpecifier + Title + Column layout + Titles + + Automatic + Three columns + Two columns + One column + + DefaultValue + tile + Values + + auto + tile + displace + overlay + + Key + split_behavior + + + Type + PSMultiValueSpecifier + Title + Story titles layout + Titles + + Titles on left + Titles on top + Titles on bottom + Titles in grid + + DefaultValue + titles_on_left + Values + + titles_on_left + titles_on_top + titles_on_bottom + titles_in_grid + + Key + story_titles_position + + + Type + PSMultiValueSpecifier + Title + List style + Titles + + Standard + Experimental + + DefaultValue + standard + Values + + standard + experimental + + Key + story_titles_style + + + Type + PSMultiValueSpecifier + Title + Preview descriptions + Values + + title + short + medium + long + + Titles + + Title only + Short title and content + Medium title and content + Long title and content + + Key + story_list_preview_text_size + DefaultValue + medium + + + Type + PSMultiValueSpecifier + Title + Preview images + Titles + + None + Small Left + Large Left + Large Right + Small Right + + Values + + none + small_left + large_left + large_right + small_right + + Key + story_list_preview_images_size + DefaultValue + small_right + + + Icon + Story List Template + + diff --git a/clients/ios/Resources/mac/Settings.bundle/en.lproj/Root.strings b/clients/ios/Resources/mac/Settings.bundle/en.lproj/Root.strings new file mode 100644 index 0000000000000000000000000000000000000000..8cd87b9d6b20c1fbf87bd4db3db267fca5ad4df9 GIT binary patch literal 546 zcmaixOHRW;5JYRuDMndFh#Ua1V1d}N;sVAV2TO?uC3a9aJn*VxFrY}tnon0(S66#J z-d9>G>6W!ur(SDqlp`9nn~*(m%iWnv?yq`Qfp6XbK1?+om~~#r)ZnhkYQU_VbfjuT zHNn`CX<0sd*m1A}>&5sU$akD=GTXJ1e literal 0 HcmV?d00001 diff --git a/clients/ios/Resources/mac/Settings.bundle/theme_dark.png b/clients/ios/Resources/mac/Settings.bundle/theme_dark.png new file mode 100644 index 0000000000000000000000000000000000000000..57dc143e3a8dad22667f477264ffaa81fa700b21 GIT binary patch literal 1160 zcmeAS@N?(olHy`uVBq!ia0vp^q9Dw{1|(OCFP#RY7>k44ofy`glX(f`a29w(7Bet# z3xhBt!>lsd^Q;1whpd3^o;3KxS@gNuokUZcbjY zRfVk*ScMgk4HDK@QUEI{$+lIB@C{IK&M!(;Fx4~BGf=YQQczH^DN0GR3UYCSY6tRc zl`=|73as??%gf94%8m8%i_-NCEiEne4UF`SjC6r2bc-wVN)jt{^NN)rhQQ2mNi9w; z$}A|!%+FH*nVXoDUs__Tqy(}E4j}F<$xK7olvfP(7SMzGAQ^o_Jp+Ag+JK&gyAMTI zMPdO&6WCV}%WObKSw#k=7U$=bf*fUMZe#;CA4L%1I*116{M>?~)Z${022-edWHEI0 z5!ft17DUnjbib8zQEFmIeo;t%evX}6PGx3Zx`HoIM8PpRzbFqFH-#2$BH6qQDej#|6{~ zOHFoMX7BiVfQfUXr;B5Vh2Y#NyS>;Pd0Ok&cigyfvtz-H$c)7ao0C$P&i$A0wD4)YS zC@944{8g=T;o#*>-&@NcM(+?%V_9Mye)d>s>wV4%lG2VBJ1!i|_NwDGdL8^z`M;uS zy4w!%6@AI|Eqir)D<0qRmiVdh{*)8|?z7I58Q$E?TdhBDmp?_#n!;i2haqSn~ zqr!O1TJ|w6x!7dR!#P`}(^FjO*1pNVQ+e8g@0@wP^_I%ysi7Pzcn)2R$o8=k>2!7N 
z7p!Apv7b4kCB5-f*Mj{&rXSkTpi-+N|KH}kW^V}V#~B;sv~woKyzOi9{IK;9+q!Ro z-{v%^+`sN~P|ajwr)~WriS3QTzord>Q@jjA)nD*XFf)ER|L%oD>G@a{{p%Y4m# X>2dw_wk44ofy`glX(f`a29w(7Bet# z3xhBt!>lsd^Q;1whpd3^o;3KxS@gNuokUZcbjY zRfVk*ScMgk4HDK@QUEI{$+lIB@C{IK&M!(;Fx4~BGf=YQQczH^DN0GR3UYCSY6tRc zl`=|73as??%gf94%8m8%i_-NCEiEne4UF`SjC6r2bc-wVN)jt{^NN)rhQQ2mNi9w; z$}A|!%+FH*nVXoDUs__Tqy(}E4j}F<$xK7olvfP(7SMzGAQ^o_Jp+Ag+JK&gyAMTI zMPdO&6WCV}%WObKSw#k=7U$=bf*fUMZe#;CA4L%1I*116{M>?~)Z${022-edWHEI0 z5!ft17DUnjbib8zQEFmIeo;t%evX}6PGx3Zx`HoIM8PpRzbFqFH-#2$BH6qQDej#|6{~ zOHFoM{G2~^85metJY5_^A~=uEJm2jXC~-XfPsbk))r~VFc@}B>3_YP?&!xR_(?*H^ zjjc;{zqxru-t^oQ9Jb6uWRXOYnzEQM_kN>8AO4=H-e-C4&i8%a_r0%;_pg0#`P}k( z<-GTm=XO5dTfA)d>OCc=S^n@HaPHsWc0+NExEa^tLxvA~4V-6)SgEn}q^_50K4g6G zi01KDU#3a7u58JzIoDH@AN+ULhfo7YBe%JqRy6!EpVTe-XUhjygM;#`{tMNy-L0G| zw$0=Ck;us99nMXhPCQLpV*6I!Q&;%Nb;TsNW>(7nB!@#we9kPnYbUy+WXh7}w?PGQ zd>)mRz7sB~%sipR_>m=ht(l$gq{YW(o>7t%-YfH?q-P1UZs3F4PJ4Y8eV^?Y*3Z%L z7O2Md@%Arkl8#6&o_FZTCfsY?1kT6XLlpUdNuY@x4PJ8udf zoH+Rq6XT?p5-XRT49>LhzpbXp+3`;7TWg2w=Fe7A?}6$}UA;us?-BXTdhgSkx|PdM zwlC1M+L*~Fr5U8O*XV&Wv*+)uTW_AbY};j(>y@@OhIyL)+tb}qX*u)e@2bsgcw6fJ ztXzlZZtl(bKm0oL&OHy_emz~o<@P53;6?w|>PfHBPrg)|e$~g_>&i^c*V)ZH&sY|3 znye$8owcWacjO_Z59`I^f7KjNc=;szme~BYR;AybbsldN+xcYHau@rjee>>d25#z- z5y{xBHLqdS+lh%gbvHfvD3L!se^aZ@qxUzXRyU>3x;Xuyx=UBrW~*>E=7SwN;j^wz zdC|P+&7{I_#Sy=p7oOjhl3{xGZ;`Xv-Z>wI%vb$psZja7w9)Trw8D?KuR@>hWYA>K zb1``Lokjn~!c$&Gv(NolEVEv88;3xy8b|fy`AL2gb~6bdY9P<~aPGn(~y zU}={1g+4*`Uc;G>dgGsXscTP;w3@uMKl`-yp~A$a7Xm&-IEy|1dnmNgdCoTVg;|*& zSsvx|ojTp|!KFfRk8s4n!iW0;Jl+UwN@i+QkWN2-@br|)pC)dd`{w)FUw<1HXBiqs z>J{a4s<|jwZ(BEW(&uS=vu__?^k44ofy`glX(f`a29w(7Bet# z3xhBt!>lsd^Q;1whpd3^o;3KxS@gNuokUZcbjY zRfVk*ScMgk4HDK@QUEI{$+lIB@C{IK&M!(;Fx4~BGf=YQQczH^DN0GR3UYCSY6tRc zl`=|73as??%gf94%8m8%i_-NCEiEne4UF`SjC6r2bc-wVN)jt{^NN)rhQQ2mNi9w; z$}A|!%+FH*nVXoDUs__Tqy(}E4j}F<$xK7olvfP(7SMzGAQ^o_Jp+Ag+JK&gyAMTI zMPdO&6WCV}%WObKSw#k=7U$=bf*fUMZe#;CA4L%1I*116{M>?~)Z${022-edWHEI0 z5!ft17DUnjbib8zQEFmIeo;t%evX}6PGx3Zx`HoIM8PpRzbFqFH- zlO5L%maiv(Y0u2l#WBP}uy%^QcUYoC`}x(8Y)dsv#NKLpr56v@4|CRo^IKXBN4uvKO%yx+vTYsvi`mD8Wh zvAS_?;q%v#Zx4w4IB|Nxrt0hSHvh9U$d`Y2P@^Kn-R$dG%NvJ3u;#oz_uqkrZ zYxDnU(sQ0#aeke7`^xt8<~LJPOBvK|*z{f&2)}z~N98NyoP&lkj?4czg*~WG&$_e6 z@8>qJO^Yl4)Na^l9cH(#Ouk3^^XXrWCLR}xgVicu^qmcsU_XE1;D0{(1kp*yH|51$ zU%v9XNy&}Bi^L8d>+8+G8C%tMdjq>!Oq1`2Kb^rVOUYnoP|CrZ?WvPAXEwe3gU*`m$zr^x? z?l*_bb{amtSC+@)rro*u?;gZQ@m_xsORmVP&Wc5mhTi8XT$fyyXPS3j3^ HP6qH@4>PZfWl~96|9w6+ ziL~|k`f(yZAzG^xLd=i|ZOA_>i@sz{z8TtG+u8S==Y5{%davuc@B7^MIq&xjPge*S z4hDfh5O+6cFXe8fT3=`=-;W!g_$#+9A}`l{ppp*bDdj~o%FR~<0%<{2YYQkZ-v9(s zi{c#i0en0Tl0yVMEHhLP%)*L!QA#!lL=lsfM;;4cBE>u|UqlvDQR@t{@~nb!DC9Z? 
zh@_%?JQzs2K*&PcVQsN^6b+0-A}PYqFtV5P{*Q9yjfx5ffG9E!7aJRkjkUoFglrsv zL?YqvL>!TbQ8F;1I6lA>WB4M|4U^w}oLQm}Atwsp2>3{qFEdyW15i;YRiHnfjXVKP z*q@R3qK{=M3&g3u;0Rbe?(byEV*kkRFDfxJYD2VM)kZLi%8Kmk#S#g)F{-3!_C(6M z|6jmAv)d5R1-vLBOC(YW>^|~0W1sSU{w}~~aFalpKbbCMF#&<_ut312?dQgE_-v$y z5Pef z>dB@aZ4AIVs+<5CSSkE{nrPqwdgbRJ&}WYB&Q6DYH1A4(OhX(oXoBWSgT_8j1&1Mn z)b^u!+obz3!8a3=T=cHy!nW{cd6jO4S{WW*qx77by4)>B@+OpzrPcDuzO(P=-_@}% z&&&>zL|@g#nut576AId!i#G4(=KAl#2Yb%kyU>aAC-0Ew_Q0Cst0un{Ror0vjm_oB zy+@0&&)~YEqU20O+e9Mq|g-c_WstJuBEq0JCnw3-*t7n&BWAbbUxHbootO*lxED*SL54$yzLHL zqPAKQjLwe}uitY4SUpD2XP(oO9oPm1<5_+;)uhON*dL#D1US2AS6RA5CvV2MvI2GY z-#rZmNg4raKw8G}hL~~-0=laY#wk&nw?%}eGwo$@v)Yw31@_+aX6y_1siQBlF(ba{ z5WmqXxSX1&PH~YXy!LSfhmuqE@~>tv(P_3-5S=H!-UVg18k&&PYbOF9{5BLjA30gv zHrn5kGW^Xlk)lzUa-2?8Upt*h*D=}qL48%Herw{1@mA||Qw5Df53x&v1w~6{BkoEy zHF;vuKJ?om&1IzCOEIaSCG~NG^qK*Kr^{grnT%KQhaTk2AF-gN%*e2uH3y1X;OUWV z3EJOTeg6OmyVO@w4as3oq+A@ZgmN9TR(1MPh%tP}fnr7=6CF}wtT`Q>sOGUs;0EoW zMJm`oIUR#fUn?az2cPdrZ34R4!~$-A9}Fh3&XZUikV6(@Zj=%pWgn7nONaML5fj}5 z62rW9Q+=4O;-JjvS&@%q>Ft8yfk(Zaeux}6LT-WXfM2MvxMQ`du59nuPD!hS zUH8iEuejYgsut*ka`p3m9m379UihcSZ11_A=^BUU$F`cCJ?T2OJ zUskQ`_OV`mwa0I#W+7&{?dYgmTSP*;xkA%Y*ShR$$Nvo^(dIUawu+I2!yNyCu0V zYalNCx~Egl7xtFjyI6MN(0E?Da#-L4#wb!-#6#(*Mhyrm>q!0J{+}S|`-Fc?SY}Td z|7zzA56aZLG$=~xH#-B-uoNCR8KIf|m20)m2Zy|)Ts3h>=f1r!FkM*4u7L8KbO-8b%$J@k7d-Q|hlh5{2zBXS%GJsrNW$+s3k8w-0d<2*=cEuX(Z%jT zRBidK^!V%Px6OK-76c(gCGUzReX zs%h2_pBps@ekeFYYR`S`30b@!H#Bjo Date: Tue, 12 Dec 2023 17:02:33 -0600 Subject: [PATCH 09/69] #1247 (Mac Catalyst edition) - More Settings tweaks. - Fixed squished theme control swatches. - Resized toolbar buttons to Mac-appropriate sizes. - Fixed position of story detail settings popover. --- .../Classes/FeedDetailObjCViewController.m | 4 +- clients/ios/Classes/FeedsObjCViewController.m | 8 +- .../ios/Classes/FontSettingsViewController.m | 9 +- clients/ios/Classes/MenuViewController.m | 9 +- clients/ios/Classes/NewsBlurAppDelegate.h | 2 + clients/ios/Classes/NewsBlurAppDelegate.m | 384 +++++++++--------- .../Classes/StoryPagesObjCViewController.m | 5 +- .../ios/NewsBlur.xcodeproj/project.pbxproj | 6 +- .../xcschemes/Alpha Widget Extension.xcscheme | 2 +- .../xcschemes/NewsBlur Alpha.xcscheme | 2 +- .../xcshareddata/xcschemes/NewsBlur.xcscheme | 2 +- .../xcschemes/Old Widget Extension.xcscheme | 2 +- .../xcschemes/Share Extension.xcscheme | 2 +- ...ry Notification Service Extension.xcscheme | 2 +- .../xcschemes/Widget Extension.xcscheme | 2 +- .../Resources/mac/Settings.bundle/Root.plist | 2 +- .../{Story List.plist => StoryList.plist} | 2 +- ...ist Template.png => StoryListTemplate.png} | Bin 1771 -> 1787 bytes clients/ios/Resources/original_button.png | Bin 1897 -> 2760 bytes clients/ios/Resources/original_button@2x.png | Bin 4682 -> 5788 bytes 20 files changed, 241 insertions(+), 204 deletions(-) rename clients/ios/Resources/mac/Settings.bundle/{Story List.plist => StoryList.plist} (99%) rename clients/ios/Resources/mac/Settings.bundle/{Story List Template.png => StoryListTemplate.png} (59%) diff --git a/clients/ios/Classes/FeedDetailObjCViewController.m b/clients/ios/Classes/FeedDetailObjCViewController.m index f0bc0e1012..26a679c11c 100644 --- a/clients/ios/Classes/FeedDetailObjCViewController.m +++ b/clients/ios/Classes/FeedDetailObjCViewController.m @@ -152,11 +152,11 @@ - (void)viewDidLoad { self.feedsBarButton = [[UIBarButtonItem alloc] initWithTitle:@"Sites" style:UIBarButtonItemStylePlain target:self action:@selector(doShowFeeds:)]; self.feedsBarButton.accessibilityLabel = @"Show Sites"; - 
UIImage *settingsImage = [Utilities imageNamed:@"settings" sized:30]; + UIImage *settingsImage = [Utilities imageNamed:@"settings" sized:self.isMac ? 24 : 30]; settingsBarButton = [UIBarButtonItem barItemWithImage:settingsImage target:self action:@selector(doOpenSettingsMenu:)]; settingsBarButton.accessibilityLabel = @"Settings"; - UIImage *markreadImage = [Utilities imageNamed:@"mark-read" sized:30]; + UIImage *markreadImage = [Utilities imageNamed:@"mark-read" sized:self.isMac ? 24 : 30]; feedMarkReadButton = [UIBarButtonItem barItemWithImage:markreadImage target:self action:@selector(doOpenMarkReadMenu:)]; feedMarkReadButton.accessibilityLabel = @"Mark all as read"; diff --git a/clients/ios/Classes/FeedsObjCViewController.m b/clients/ios/Classes/FeedsObjCViewController.m index f1242056e7..fd13e48705 100644 --- a/clients/ios/Classes/FeedsObjCViewController.m +++ b/clients/ios/Classes/FeedsObjCViewController.m @@ -1059,7 +1059,11 @@ - (void)showUserProfile { appDelegate.activeUserProfileId = [NSString stringWithFormat:@"%@", [appDelegate.dictSocialProfile objectForKey:@"user_id"]]; appDelegate.activeUserProfileName = [NSString stringWithFormat:@"%@", [appDelegate.dictSocialProfile objectForKey:@"username"]]; // appDelegate.activeUserProfileName = @"You"; - [appDelegate showUserProfileModal:self.navigationItem.titleView]; +#if TARGET_OS_MACCATALYST + [appDelegate showUserProfileModal:self.userBarButton]; +#else + [appDelegate showUserProfileModal:self.navigationItem.titleView]; +#endif } - (IBAction)tapAddSite:(id)sender { @@ -1340,7 +1344,7 @@ - (void)updateTheme { NBBarButtonItem *barButton = self.addBarButton.customView; [barButton setImage:[[ThemeManager themeManager] themedImage:[UIImage imageNamed:@"nav_icn_add.png"]] forState:UIControlStateNormal]; - self.settingsBarButton.image = [Utilities imageNamed:@"settings" sized:30]; + self.settingsBarButton.image = [Utilities imageNamed:@"settings" sized:self.isMac ? 
24 : 30]; [self layoutHeaderCounts:0]; [self refreshHeaderCounts]; diff --git a/clients/ios/Classes/FontSettingsViewController.m b/clients/ios/Classes/FontSettingsViewController.m index 6608cfb5df..21cb8e22b8 100644 --- a/clients/ios/Classes/FontSettingsViewController.m +++ b/clients/ios/Classes/FontSettingsViewController.m @@ -580,7 +580,14 @@ - (UIImage *)themeImageWithName:(NSString *)name selected:(BOOL)selected { name = [name stringByAppendingString:@"-sel"]; } - return [[UIImage imageNamed:name] imageWithRenderingMode:UIImageRenderingModeAlwaysOriginal]; + UIImage *image = [[UIImage imageNamed:name] imageWithRenderingMode:UIImageRenderingModeAlwaysOriginal]; + + if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomMac) { + image = [Utilities imageWithImage:image convertToSize:CGSizeMake(20.0, 20.0)]; + image = [image imageWithRenderingMode:UIImageRenderingModeAlwaysOriginal]; + } + + return image; } @end diff --git a/clients/ios/Classes/MenuViewController.m b/clients/ios/Classes/MenuViewController.m index c2f84a227d..918d64c637 100644 --- a/clients/ios/Classes/MenuViewController.m +++ b/clients/ios/Classes/MenuViewController.m @@ -157,7 +157,14 @@ - (UIImage *)themeImageWithName:(NSString *)name selected:(BOOL)selected { name = [name stringByAppendingString:@"-sel"]; } - return [[UIImage imageNamed:name] imageWithRenderingMode:UIImageRenderingModeAlwaysOriginal]; + UIImage *image = [[UIImage imageNamed:name] imageWithRenderingMode:UIImageRenderingModeAlwaysOriginal]; + + if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomMac) { + image = [Utilities imageWithImage:image convertToSize:CGSizeMake(20.0, 20.0)]; + image = [image imageWithRenderingMode:UIImageRenderingModeAlwaysOriginal]; + } + + return image; } - (UITableViewCell *)makeThemeSegmentedTableCell { diff --git a/clients/ios/Classes/NewsBlurAppDelegate.h b/clients/ios/Classes/NewsBlurAppDelegate.h index c4fb4b5ba0..4bb2de785b 100644 --- a/clients/ios/Classes/NewsBlurAppDelegate.h +++ b/clients/ios/Classes/NewsBlurAppDelegate.h @@ -445,6 +445,8 @@ SFSafariViewControllerDelegate> { - (void)showPopoverWithViewController:(UIViewController *)viewController contentSize:(CGSize)contentSize barButtonItem:(UIBarButtonItem *)barButtonItem; - (void)showPopoverWithViewController:(UIViewController *)viewController contentSize:(CGSize)contentSize sourceView:(UIView *)sourceView sourceRect:(CGRect)sourceRect; - (void)showPopoverWithViewController:(UIViewController *)viewController contentSize:(CGSize)contentSize sourceView:(UIView *)sourceView sourceRect:(CGRect)sourceRect permittedArrowDirections:(UIPopoverArrowDirection)permittedArrowDirections; +//- (void)showPopoverWithViewController:(UIViewController *)viewController contentSize:(CGSize)contentSize sourceNavigationController:(UINavigationController *)sourceNavigationController barButtonItem:(UIBarButtonItem *)barButtonItem sourceView:(UIView *)sourceView sourceRect:(CGRect)sourceRect permittedArrowDirections:(UIPopoverArrowDirection)permittedArrowDirections; + - (void)hidePopoverAnimated:(BOOL)animated completion:(void (^)(void))completion; - (BOOL)hidePopoverAnimated:(BOOL)animated; - (void)hidePopover; diff --git a/clients/ios/Classes/NewsBlurAppDelegate.m b/clients/ios/Classes/NewsBlurAppDelegate.m index 4f38bf572b..404bec0333 100644 --- a/clients/ios/Classes/NewsBlurAppDelegate.m +++ b/clients/ios/Classes/NewsBlurAppDelegate.m @@ -192,7 +192,7 @@ @implementation NewsBlurAppDelegate @synthesize remainingUncachedImagesCount; + 
(instancetype)sharedAppDelegate { - return (NewsBlurAppDelegate *)[UIApplication sharedApplication].delegate; + return (NewsBlurAppDelegate *)[UIApplication sharedApplication].delegate; } + (instancetype)shared { @@ -219,7 +219,7 @@ - (BOOL)application:(UIApplication *)application willFinishLaunchingWithOptions: NBURLCache *urlCache = [[NBURLCache alloc] init]; [NSURLCache setSharedURLCache:urlCache]; // Uncomment below line to test image caching -// [[NSURLCache sharedURLCache] removeAllCachedResponses]; + // [[NSURLCache sharedURLCache] removeAllCachedResponses]; dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, (unsigned long)NULL), ^(void) { @@ -253,12 +253,12 @@ - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:( if (![appOpening isEqualToString:@"feeds"]) { self.pendingFolder = appOpening; -// [self loadRiverFeedDetailView:self.feedDetailViewController withFolder:appOpening]; + // [self loadRiverFeedDetailView:self.feedDetailViewController withFolder:appOpening]; } [self registerBackgroundTask]; - return YES; + return YES; } - (void)applicationDidBecomeActive:(UIApplication *)application { @@ -304,31 +304,31 @@ - (BOOL)application:(UIApplication *)application shouldRestoreSecureApplicationS // state restoration disabled; uses other options now return NO; -// NSUserDefaults *preferences = [NSUserDefaults standardUserDefaults]; -// NSString *option = [preferences stringForKey:@"restore_state"]; -// -// if ([option isEqualToString:@"never"]) { -// return NO; -// } else if ([option isEqualToString:@"always"]) { -// return YES; -// } -// -// NSTimeInterval daysInterval = 60 * 60; -// NSTimeInterval limitInterval = option.doubleValue * daysInterval; -// NSInteger version = [coder decodeIntegerForKey:@"version"]; -// NSDate *lastSavedDate = [coder decodeObjectOfClass:[NSDate class] forKey:@"last_saved_state_date"]; -// -// if (limitInterval == 0) { -// limitInterval = 24 * daysInterval; -// } -// -// if (version > CURRENT_STATE_VERSION || lastSavedDate == nil) { -// return NO; -// } -// -// NSTimeInterval savedInterval = -[lastSavedDate timeIntervalSinceNow]; -// -// return savedInterval < limitInterval; + // NSUserDefaults *preferences = [NSUserDefaults standardUserDefaults]; + // NSString *option = [preferences stringForKey:@"restore_state"]; + // + // if ([option isEqualToString:@"never"]) { + // return NO; + // } else if ([option isEqualToString:@"always"]) { + // return YES; + // } + // + // NSTimeInterval daysInterval = 60 * 60; + // NSTimeInterval limitInterval = option.doubleValue * daysInterval; + // NSInteger version = [coder decodeIntegerForKey:@"version"]; + // NSDate *lastSavedDate = [coder decodeObjectOfClass:[NSDate class] forKey:@"last_saved_state_date"]; + // + // if (limitInterval == 0) { + // limitInterval = 24 * daysInterval; + // } + // + // if (version > CURRENT_STATE_VERSION || lastSavedDate == nil) { + // return NO; + // } + // + // NSTimeInterval savedInterval = -[lastSavedDate timeIntervalSinceNow]; + // + // return savedInterval < limitInterval; } - (UIViewController *)application:(UIApplication *)application viewControllerWithRestorationIdentifierPath:(NSArray *)identifierComponents coder:(NSCoder *)coder { @@ -458,11 +458,26 @@ - (void)registerDefaultsFromSettingsBundle { return; } - NSString *name = !self.isMac && !self.isPhone ? 
@"Root~ipad.plist" : @"Root.plist"; - NSDictionary *settings = [NSDictionary dictionaryWithContentsOfFile:[settingsBundle stringByAppendingPathComponent:name]]; - NSArray *preferences = [settings objectForKey:@"PreferenceSpecifiers"]; + if (self.isMac) { + [self registerDefaultsFromSettingsBundle:settingsBundle withPlistName:@"Root.plist"]; + [self registerDefaultsFromSettingsBundle:settingsBundle withPlistName:@"StoryList.plist"]; + [self registerDefaultsFromSettingsBundle:settingsBundle withPlistName:@"Appearance.plist"]; + [self registerDefaultsFromSettingsBundle:settingsBundle withPlistName:@"Advanced.plist"]; + } else if (self.isPhone) { + [self registerDefaultsFromSettingsBundle:settingsBundle withPlistName:@"Root.plist"]; + } else { + [self registerDefaultsFromSettingsBundle:settingsBundle withPlistName:@"Root~ipad.plist"]; + } + NSString *version = [[[NSBundle mainBundle] infoDictionary] objectForKey:@"CFBundleShortVersionString"]; + [[NSUserDefaults standardUserDefaults] setObject:version forKey:@"version"]; +} + +- (void)registerDefaultsFromSettingsBundle:(NSString *)settingsPath withPlistName:(NSString *)name { + NSDictionary *settings = [NSDictionary dictionaryWithContentsOfFile:[settingsPath stringByAppendingPathComponent:name]]; + NSArray *preferences = [settings objectForKey:@"PreferenceSpecifiers"]; NSMutableDictionary *defaultsToRegister = [[NSMutableDictionary alloc] initWithCapacity:[preferences count]]; + for(NSDictionary *prefSpecification in preferences) { NSString *key = [prefSpecification objectForKey:@"Key"]; if (key && [[prefSpecification allKeys] containsObject:@"DefaultValue"]) { @@ -471,9 +486,6 @@ - (void)registerDefaultsFromSettingsBundle { } [[NSUserDefaults standardUserDefaults] registerDefaults:defaultsToRegister]; - - NSString *version = [[[NSBundle mainBundle] infoDictionary] objectForKey:@"CFBundleShortVersionString"]; - [[NSUserDefaults standardUserDefaults] setObject:version forKey:@"version"]; } - (void)registerForRemoteNotifications { @@ -481,15 +493,15 @@ - (void)registerForRemoteNotifications { center.delegate = self; [center requestAuthorizationWithOptions:(UNAuthorizationOptionSound | UNAuthorizationOptionAlert | UNAuthorizationOptionBadge) completionHandler:^(BOOL granted, NSError * _Nullable error){ if(!error){ - dispatch_async(dispatch_get_main_queue(), ^{ + dispatch_async(dispatch_get_main_queue(), ^{ [[UIApplication sharedApplication] registerForRemoteNotifications]; }); } }]; -// UNNotificationAction *viewAction = [UNNotificationAction actionWithIdentifier:@"VIEW_STORY_IDENTIFIER" -// title:@"View story" -// options:UNNotificationActionOptionForeground]; + // UNNotificationAction *viewAction = [UNNotificationAction actionWithIdentifier:@"VIEW_STORY_IDENTIFIER" + // title:@"View story" + // options:UNNotificationActionOptionForeground]; UNNotificationAction *readAction = [UNNotificationAction actionWithIdentifier:@"MARK_READ_IDENTIFIER" title:@"Mark read" options:UNNotificationActionOptionNone]; @@ -497,8 +509,8 @@ - (void)registerForRemoteNotifications { title:@"Save story" options:UNNotificationActionOptionNone]; UNNotificationAction *dismissAction = [UNNotificationAction actionWithIdentifier:@"DISMISS_IDENTIFIER" - title:@"Dismiss" - options:UNNotificationActionOptionDestructive]; + title:@"Dismiss" + options:UNNotificationActionOptionDestructive]; UNNotificationCategory *storyCategory = [UNNotificationCategory categoryWithIdentifier:@"STORY_CATEGORY" actions:@[readAction, starAction, dismissAction] intentIdentifiers:@[] @@ -511,7 +523,7 
@@ - (void)registerForBadgeNotifications { UNUserNotificationCenter *center = [UNUserNotificationCenter currentNotificationCenter]; center.delegate = self; [center requestAuthorizationWithOptions:(UNAuthorizationOptionBadge) completionHandler:^(BOOL granted, NSError * _Nullable error){ - + }]; } @@ -635,10 +647,10 @@ - (BOOL)application:(UIApplication *)app openURL:(NSURL *)url options:(NSDiction } - (void)didReceiveMemoryWarning { - // Releases the view if it doesn't have a superview. + // Releases the view if it doesn't have a superview. [super didReceiveMemoryWarning]; - // Release any cached data, images, etc that aren't in use. + // Release any cached data, images, etc that aren't in use. [cachedStoryImages removeAllObjects]; } @@ -662,13 +674,13 @@ - (void)setupReachability { - (void)reachabilityChanged:(id)something { NSLog(@"Reachability changed: %@", something); -// Reachability* reach = [Reachability reachabilityWithHostname:self.host]; - -// if (reach.isReachable && feedsViewController.isOffline) { -// [feedsViewController loadOfflineFeeds:NO]; -//// } else { -//// [feedsViewController loadOfflineFeeds:NO]; -// } + // Reachability* reach = [Reachability reachabilityWithHostname:self.host]; + + // if (reach.isReachable && feedsViewController.isOffline) { + // [feedsViewController loadOfflineFeeds:NO]; + //// } else { + //// [feedsViewController loadOfflineFeeds:NO]; + // } } - (NSString *)url { @@ -732,17 +744,17 @@ - (NSDictionary *)getUser:(NSInteger)userId { - (void)showUserProfileModal:(id)sender { [self hidePopoverAnimated:NO]; UserProfileViewController *newUserProfile = [[UserProfileViewController alloc] init]; - self.userProfileViewController = newUserProfile; + self.userProfileViewController = newUserProfile; UINavigationController *navController = [[UINavigationController alloc] initWithRootViewController:self.userProfileViewController]; self.userProfileNavigationController = navController; self.userProfileNavigationController.navigationBar.translucent = NO; - + // adding Done button UIBarButtonItem *donebutton = [[UIBarButtonItem alloc] - initWithTitle:@"Close" - style:UIBarButtonItemStyleDone - target:self + initWithTitle:@"Close" + style:UIBarButtonItemStyleDone + target:self action:@selector(hideUserProfileModal)]; newUserProfile.navigationItem.rightBarButtonItem = donebutton; @@ -754,30 +766,30 @@ - (void)showUserProfileModal:(id)sender { } else { [self.feedsNavigationController presentViewController:navController animated:YES completion:nil]; } - + } - (void)pushUserProfile { UserProfileViewController *userProfileView = [[UserProfileViewController alloc] init]; - - + + // adding Done button UIBarButtonItem *donebutton = [[UIBarButtonItem alloc] - initWithTitle:@"Close" - style:UIBarButtonItemStyleDone - target:self + initWithTitle:@"Close" + style:UIBarButtonItemStyleDone + target:self action:@selector(hideUserProfileModal)]; userProfileView.navigationItem.rightBarButtonItem = donebutton; userProfileView.navigationItem.title = self.activeUserProfileName; userProfileView.navigationItem.backBarButtonItem.title = self.activeUserProfileName; - [userProfileView getUserProfile]; + [userProfileView getUserProfile]; if (self.modalNavigationController.view.window == nil) { [self.userProfileNavigationController showViewController:userProfileView sender:self]; } else { [self.modalNavigationController showViewController:userProfileView sender:self]; }; - + } - (void)hideUserProfileModal { @@ -824,7 +836,7 @@ - (void)showPremiumDialog { 
                                        initWithRootViewController:self.premiumViewController];
     }
     self.premiumNavigationController.navigationBar.translucent = NO;
-    
+
     [self.splitViewController dismissViewControllerAnimated:NO completion:nil];
     premiumNavigationController.modalPresentationStyle = UIModalPresentationFormSheet;
     [self.splitViewController presentViewController:premiumNavigationController animated:YES completion:nil];
@@ -879,7 +891,7 @@ - (void)addSplitControlToMenuController:(MenuViewController *)menuViewController
 
 - (void)showPreferences {
     if (self.isMac) {
-//        [[UIApplication sharedApplication] sendAction:@selector(orderFrontPreferencesPanel:) to:nil from:nil forEvent:nil];
+        // [[UIApplication sharedApplication] sendAction:@selector(orderFrontPreferencesPanel:) to:nil from:nil forEvent:nil];
         return;
     }
@@ -889,7 +901,7 @@ - (void)showPreferences {
     }
     
     [self hidePopover];
-    
+
     preferencesViewController.delegate = self.feedsViewController;
     preferencesViewController.showDoneButton = YES;
     preferencesViewController.showCreditsFooter = NO;
@@ -1026,10 +1038,10 @@ - (void)showSendTo:(UIViewController *)vc sender:(id)sender
         NSString *maybeFeedTitle = feedTitle ? [NSString stringWithFormat:@" via %@", feedTitle] : @"";
         text = [NSString stringWithFormat:@"%@\n\n\n\n%@%@\n%@", [url absoluteString], title, maybeFeedTitle, text];
     }
-    
+
     NBActivityItemSource *activityItemSource = [[NBActivityItemSource alloc] initWithUrl:url authorName:authorName text:text title:title feedTitle:feedTitle];
     NSArray *activityItems = @[activityItemSource, url];
-    
+
     NSMutableArray *appActivities = [[NSMutableArray alloc] init];
     if (url) [appActivities addObject:[[TUSafariActivity alloc] init]];
     if (url) [appActivities addObject:[[ARChromeActivity alloc]
@@ -1080,7 +1092,7 @@ - (void)showSendTo:(UIViewController *)vc sender:(id)sender
             [storyHUD hide:YES afterDelay:1];
         }
     }];
-    
+
     if (!self.isPhone) {
         BOOL fromPopover = [self hidePopoverAnimated:NO];
         [self.splitViewController presentViewController:activityViewController animated:!fromPopover completion:nil];
@@ -1116,25 +1128,25 @@
 }
 
 - (void)showShareView:(NSString *)type
-           setUserId:(NSString *)userId
-         setUsername:(NSString *)username
-          setReplyId:(NSString *)replyId {
+            setUserId:(NSString *)userId
+          setUsername:(NSString *)username
+           setReplyId:(NSString *)replyId {
     [self.shareViewController setCommentType:type];
-//    if (!self.isPhone) {
-//        [self.masterContainerViewController transitionToShareView];
-//        [self.shareViewController setSiteInfo:type setUserId:userId setUsername:username setReplyId:replyId];
-//    } else {
-        if (self.shareNavigationController == nil) {
-            UINavigationController *shareNav = [[UINavigationController alloc]
-                                                initWithRootViewController:self.shareViewController];
-            self.shareNavigationController = shareNav;
-            self.shareNavigationController.navigationBar.translucent = NO;
-        }
-        [self.feedsNavigationController presentViewController:self.shareNavigationController animated:YES completion:^{
-            [self.shareViewController setSiteInfo:type setUserId:userId setUsername:username setReplyId:replyId];
-        }];
-//    }
+    // if (!self.isPhone) {
+    //     [self.masterContainerViewController transitionToShareView];
+    //     [self.shareViewController setSiteInfo:type setUserId:userId setUsername:username setReplyId:replyId];
+    // } else {
+    if (self.shareNavigationController == nil) {
+        UINavigationController *shareNav = [[UINavigationController alloc]
+                                            initWithRootViewController:self.shareViewController];
+        self.shareNavigationController = shareNav;
+        self.shareNavigationController.navigationBar.translucent = NO;
+    }
+    [self.feedsNavigationController presentViewController:self.shareNavigationController animated:YES completion:^{
+        [self.shareViewController setSiteInfo:type setUserId:userId setUsername:username setReplyId:replyId];
+    }];
+    // }
 }
 
 - (void)hideShareView:(BOOL)resetComment {
@@ -1142,11 +1154,11 @@ - (void)hideShareView:(BOOL)resetComment {
         self.shareViewController.commentField.text = @"";
         self.shareViewController.currentType = nil;
     }
-    
-//    if (!self.isPhone) {
-//        [self.masterContainerViewController transitionFromShareView];
-//        [self.storyPagesViewController becomeFirstResponder];
-//    } else
+    
+    // if (!self.isPhone) {
+    //     [self.masterContainerViewController transitionFromShareView];
+    //     [self.storyPagesViewController becomeFirstResponder];
+    // } else
     if (!self.showingSafariViewController) {
         [self.feedsNavigationController dismissViewControllerAnimated:YES completion:nil];
         [self.shareViewController.commentField resignFirstResponder];
@@ -1252,7 +1264,7 @@ - (void)showLogin {
 }
 
 - (void)showFirstTimeUser {
-//    [self.feedsViewController changeToAllMode];
+    // [self.feedsViewController changeToAllMode];
     UINavigationController *ftux = [[UINavigationController alloc] 
initWithRootViewController:self.firstTimeUserViewController]; @@ -1285,10 +1297,10 @@ - (void)openTrainSite { // Needs a delay because the menu will close the popover. dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 0.01 * NSEC_PER_SEC), dispatch_get_main_queue(), ^{ - [self - openTrainSiteWithFeedLoaded:YES - from:self.feedDetailViewController.settingsBarButton]; - }); + [self + openTrainSiteWithFeedLoaded:YES + from:self.feedDetailViewController.settingsBarButton]; + }); } - (void)openTrainSiteWithFeedLoaded:(BOOL)feedLoaded from:(id)sender { @@ -1298,8 +1310,8 @@ - (void)openTrainSiteWithFeedLoaded:(BOOL)feedLoaded from:(id)sender { trainerViewController.feedLoaded = feedLoaded; if (!self.isPhone) { -// trainerViewController.modalPresentationStyle=UIModalPresentationFormSheet; -// [navController presentViewController:trainerViewController animated:YES completion:nil]; + // trainerViewController.modalPresentationStyle=UIModalPresentationFormSheet; + // [navController presentViewController:trainerViewController animated:YES completion:nil]; [self showPopoverWithViewController:self.trainerViewController contentSize:CGSizeMake(500, 630) sender:sender]; } else { if (self.trainNavigationController == nil) { @@ -1450,7 +1462,7 @@ - (void)clearNetworkManager { [networkManager.requestSerializer setCachePolicy:NSURLRequestReloadIgnoringLocalCacheData]; NSString *currentVersion = [[[NSBundle mainBundle] infoDictionary] - objectForKey:@"CFBundleVersion"]; + objectForKey:@"CFBundleVersion"]; NSString *UA; if (self.isMac) { UA = [NSString stringWithFormat:@"NewsBlur Mac App v%@", currentVersion]; @@ -1553,10 +1565,10 @@ - (void)POST:(NSString *)urlString } - (void)POST:(NSString *)urlString - parameters:(id)parameters - target:(id)target - success:(SEL)success - failure:(SEL)failure { + parameters:(id)parameters + target:(id)target + success:(SEL)success + failure:(SEL)failure { [self POST:urlString parameters:parameters success:^(NSURLSessionDataTask * _Nonnull task, id _Nullable responseObject) { [self safelyInvokeTarget:target withSelector:success passingObject:responseObject]; } failure:^(NSURLSessionDataTask * _Nullable task, NSError * _Nonnull error) { @@ -1631,7 +1643,7 @@ - (void)loadFeedDetailView { - (void)loadFeedDetailView:(BOOL)transition { self.inFeedDetail = YES; popoverHasFeedView = YES; - + [feedDetailViewController resetFeedDetail]; feedDetailViewController.storiesCollection = storiesCollection; @@ -1698,24 +1710,24 @@ - (void)loadFeed:(NSString *)feedId [self reloadFeedsView:NO]; -// dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 1 * NSEC_PER_SEC), dispatch_get_main_queue(), ^{ -// if (!self.isPhone) { -// [self loadFeedDetailView]; -// } else if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone) { -// // [self.feedsNavigationController popToRootViewControllerAnimated:NO]; -// [self showFeedsListAnimated:NO]; -// // [self.splitViewController showColumn:UISplitViewControllerColumnPrimary]; -// [self hidePopoverAnimated:NO completion:^{ -// if (self.feedsNavigationController.presentedViewController) { -// [self.feedsNavigationController dismissViewControllerAnimated:NO completion:^{ -// [self loadFeedDetailView]; -// }]; -// } else { -// [self loadFeedDetailView]; -// } -// }]; -// } -// }); + // dispatch_after(dispatch_time(DISPATCH_TIME_NOW, 1 * NSEC_PER_SEC), dispatch_get_main_queue(), ^{ + // if (!self.isPhone) { + // [self loadFeedDetailView]; + // } else if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone) { + // // 
[self.feedsNavigationController popToRootViewControllerAnimated:NO]; + // [self showFeedsListAnimated:NO]; + // // [self.splitViewController showColumn:UISplitViewControllerColumnPrimary]; + // [self hidePopoverAnimated:NO completion:^{ + // if (self.feedsNavigationController.presentedViewController) { + // [self.feedsNavigationController dismissViewControllerAnimated:NO completion:^{ + // [self loadFeedDetailView]; + // }]; + // } else { + // [self loadFeedDetailView]; + // } + // }]; + // } + // }); } - (void)loadTryFeedDetailView:(NSString *)feedId @@ -1728,7 +1740,7 @@ - (void)loadTryFeedDetailView:(NSString *)feedId if (social) { storiesCollection.isSocialView = YES; self.inFindingStoryMode = YES; - + if (feed == nil) { feed = user; self.isTryFeedView = YES; @@ -1737,12 +1749,12 @@ - (void)loadTryFeedDetailView:(NSString *)feedId if (feed == nil) { feed = user; self.isTryFeedView = YES; - + } storiesCollection.isSocialView = NO; -// [self setInFindingStoryMode:NO]; + // [self setInFindingStoryMode:NO]; } - + self.tryFeedStoryId = contentId; storiesCollection.activeFeed = feed; storiesCollection.activeFolder = nil; @@ -1750,8 +1762,8 @@ - (void)loadTryFeedDetailView:(NSString *)feedId if (!self.isPhone) { [self loadFeedDetailView]; } else if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone) { -// [self.feedsNavigationController popToRootViewControllerAnimated:NO]; -// [self.splitViewController showColumn:UISplitViewControllerColumnPrimary]; + // [self.feedsNavigationController popToRootViewControllerAnimated:NO]; + // [self.splitViewController showColumn:UISplitViewControllerColumnPrimary]; [self showFeedsListAnimated:NO]; [self hidePopoverAnimated:YES completion:^{ if (self.feedsNavigationController.presentedViewController) { @@ -1804,13 +1816,13 @@ - (NSString *)widgetFolder { - (void)loadStarredDetailViewWithStory:(NSString *)contentId showFindingStory:(BOOL)showHUD { if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone) { -// [self.feedsNavigationController popToRootViewControllerAnimated:NO]; -// [self.splitViewController showColumn:UISplitViewControllerColumnPrimary]; + // [self.feedsNavigationController popToRootViewControllerAnimated:NO]; + // [self.splitViewController showColumn:UISplitViewControllerColumnPrimary]; [self showFeedsListAnimated:NO]; [self.feedsNavigationController dismissViewControllerAnimated:YES completion:nil]; [self hidePopoverAnimated:NO]; } - + self.inFindingStoryMode = YES; [storiesCollection reset]; storiesCollection.isRiverView = YES; @@ -1894,7 +1906,7 @@ - (void)confirmLogout { [alertController dismissViewControllerAnimated:YES completion:nil]; NSLog(@"Logging out..."); NSString *urlString = [NSString stringWithFormat:@"%@/reader/logout?api=1", - self.url]; + self.url]; [self GET:urlString parameters:nil success:^(NSURLSessionDataTask * _Nonnull task, id _Nullable responseObject) { [MBProgressHUD hideHUDForView:self.view animated:YES]; [self showLogin]; @@ -1924,7 +1936,7 @@ - (void)showConnectToService:(NSString *)serviceName { self.modalNavigationController.modalPresentationStyle = UIModalPresentationFormSheet; self.modalNavigationController.navigationBar.translucent = NO; [self.splitViewController presentViewController:modalNavigationController - animated:YES completion:nil]; + animated:YES completion:nil]; } - (void)showAlert:(UIAlertController *)alert withViewController:(UIViewController *)vc { @@ -1945,7 +1957,7 @@ - (void)refreshUserProfile:(void(^)(void))callback { } - 
(void)refreshFeedCount:(id)feedId { -// [feedsViewController fadeFeed:feedId]; + // [feedsViewController fadeFeed:feedId]; [feedsViewController redrawFeedCounts:feedId]; [feedsViewController refreshHeaderCounts]; } @@ -1963,9 +1975,9 @@ - (void)loadRiverFeedDetailView:(FeedDetailViewController *)feedDetailView withF if (feedDetailView == feedDetailViewController) { feedDetailView.storiesCollection = storiesCollection; } - + [feedDetailView.storiesCollection reset]; - + if ([folder isEqualToString:@"river_global"]) { feedDetailView.storiesCollection.isSocialRiverView = YES; feedDetailView.storiesCollection.isRiverView = YES; @@ -2089,8 +2101,8 @@ - (void)loadRiverFeedDetailView:(FeedDetailViewController *)feedDetailView withF - (void)openDashboardRiverForStory:(NSString *)contentId showFindingStory:(BOOL)showHUD { if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone) { -// [self.feedsNavigationController popToRootViewControllerAnimated:NO]; -// [self.splitViewController showColumn:UISplitViewControllerColumnPrimary]; + // [self.feedsNavigationController popToRootViewControllerAnimated:NO]; + // [self.splitViewController showColumn:UISplitViewControllerColumnPrimary]; [self showFeedsListAnimated:NO]; [self.feedsNavigationController dismissViewControllerAnimated:YES completion:nil]; [self hidePopoverAnimated:NO]; @@ -2139,9 +2151,9 @@ - (void)recalculateIntelligenceScores:(id)feedId { [newFeedStories addObject:story]; continue; } - + NSMutableDictionary *newStory = [story mutableCopy]; - + // If the story is visible, mark it as sticky so it doesn't go away on page loads. NSInteger score = [NewsBlurAppDelegate computeStoryScore:[story objectForKey:@"intelligence"]]; if (score >= self.selectedIntelligence) { @@ -2203,10 +2215,10 @@ - (void)changeActiveFeedDetailRow { } - (void)loadStoryDetailView { -// if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone || self.isCompactWidth) { -// [self showDetailViewController:detailViewController sender:self]; -// feedsNavigationController.navigationItem.hidesBackButton = YES; -// } + // if ([[UIDevice currentDevice] userInterfaceIdiom] == UIUserInterfaceIdiomPhone || self.isCompactWidth) { + // [self showDetailViewController:detailViewController sender:self]; + // feedsNavigationController.navigationItem.hidesBackButton = YES; + // } self.inFindingStoryMode = NO; self.findingStoryStartDate = nil; @@ -2235,7 +2247,7 @@ - (void)loadStoryDetailView { [self deferredChangePage:params]; } } - + [MBProgressHUD hideHUDForView:self.storyPagesViewController.view animated:YES]; } @@ -2280,7 +2292,7 @@ - (void)showOriginalStory:(NSURL *)url sender:(id)sender { NSString *storyBrowser = [preferences stringForKey:@"story_browser"]; if ([storyBrowser isEqualToString:@"safari"]) { [[UIApplication sharedApplication] openURL:url options:@{} completionHandler:nil]; -// [[UIApplication sharedApplication] openURL:url]; + // [[UIApplication sharedApplication] openURL:url]; return; } else if ([storyBrowser isEqualToString:@"chrome"] && [[UIApplication sharedApplication] canOpenURL:[NSURL URLWithString:@"googlechrome-x-callback://"]]) { @@ -2299,8 +2311,8 @@ - (void)showOriginalStory:(NSURL *)url sender:(id)sender { return; } else if ([storyBrowser isEqualToString:@"opera_mini"] && [[UIApplication sharedApplication] canOpenURL:[NSURL URLWithString:@"opera-http://"]]) { - - + + NSString *operaURL; NSRange prefix = [[url absoluteString] rangeOfString: @"http"]; if (NSNotFound != prefix.location) { @@ -2308,7 +2320,7 @@ - 
(void)showOriginalStory:(NSURL *)url sender:(id)sender { stringByReplacingCharactersInRange: prefix withString: @"opera-http"]; } - + [[UIApplication sharedApplication] openURL:[NSURL URLWithString:operaURL] options:@{} completionHandler:nil]; return; } else if ([storyBrowser isEqualToString:@"firefox"]) { @@ -2321,8 +2333,8 @@ - (void)showOriginalStory:(NSURL *)url sender:(id)sender { if (NSNotFound != prefix.location) { edgeURL = [[url absoluteString] - stringByReplacingCharactersInRange: prefix - withString: @"microsoft-edge-http"]; + stringByReplacingCharactersInRange: prefix + withString: @"microsoft-edge-http"]; } [[UIApplication sharedApplication] openURL:[NSURL URLWithString:edgeURL] options:@{} completionHandler:nil]; @@ -2402,9 +2414,9 @@ - (void)safariViewControllerDidFinish:(SFSafariViewController *)controller { } - (void)deferredSafariCleanup { -// if (!self.isPhone) { -// self.navigationController.view.frame = CGRectMake(self.navigationController.view.frame.origin.x, self.navigationController.view.frame.origin.y, self.isPortrait ? 270.0 : 370.0, self.navigationController.view.frame.size.height); -// } + // if (!self.isPhone) { + // self.navigationController.view.frame = CGRectMake(self.navigationController.view.frame.origin.x, self.navigationController.view.frame.origin.y, self.isPortrait ? 270.0 : 370.0, self.navigationController.view.frame.size.height); + // } [self.storyPagesViewController reorientPages]; } @@ -2437,7 +2449,7 @@ - (UINavigationController *)fontSettingsNavigationController { - (void)closeOriginalStory { if (!self.isPhone) { -// [self.masterContainerViewController transitionFromOriginalView]; + // [self.masterContainerViewController transitionFromOriginalView]; } else { if ([[feedsNavigationController viewControllers] containsObject:originalStoryViewController]) { [feedsNavigationController popToViewController:self.storyPagesViewController animated:YES]; @@ -2463,7 +2475,7 @@ - (void)showFeedsListAnimated:(BOOL)animated { // //- (void)setActiveStory:(NSDictionary *)newActiveStory { // NSLog(@"🪿 setActiveStory: %@ -> %@", activeStory[@"story_title"], newActiveStory[@"story_title"]); // log -// +// // activeStory = newActiveStory; //} @@ -2472,23 +2484,23 @@ - (void)showFeedsListAnimated:(BOOL)animated { - (void)handleUserActivity:(NSUserActivity *)activity { if ([activity.activityType isEqualToString:@"com.newsblur.refresh"]) { -// [self.feedsNavigationController popToRootViewControllerAnimated:NO]; -// [self.splitViewController showColumn:UISplitViewControllerColumnPrimary]; + // [self.feedsNavigationController popToRootViewControllerAnimated:NO]; + // [self.splitViewController showColumn:UISplitViewControllerColumnPrimary]; [self showFeedsListAnimated:NO]; [self.feedsViewController refreshFeedList]; } else if ([activity.activityType isEqualToString:@"com.newsblur.gotoFolder"]) { NSString *folder = activity.userInfo[@"folder"]; -// [self.feedsNavigationController popToRootViewControllerAnimated:NO]; -// [self.splitViewController showColumn:UISplitViewControllerColumnPrimary]; + // [self.feedsNavigationController popToRootViewControllerAnimated:NO]; + // [self.splitViewController showColumn:UISplitViewControllerColumnPrimary]; [self showFeedsListAnimated:NO]; [self loadRiverFeedDetailView:self.feedDetailViewController withFolder:folder]; } else if ([activity.activityType isEqualToString:@"com.newsblur.gotoFeed"]) { NSString *folder = activity.userInfo[@"folder"]; NSString *feedID = activity.userInfo[@"feedID"]; -// [self.feedsNavigationController 
popToRootViewControllerAnimated:NO]; -// [self.splitViewController showColumn:UISplitViewControllerColumnPrimary]; + // [self.feedsNavigationController popToRootViewControllerAnimated:NO]; + // [self.splitViewController showColumn:UISplitViewControllerColumnPrimary]; [self showFeedsListAnimated:NO]; if (folder != nil) { @@ -2631,7 +2643,7 @@ - (void)toggleFeedTextView:(id)feedId { #pragma mark - Unread Counts -- (void)populateDictUnreadCounts { +- (void)populateDictUnreadCounts { [self.database inDatabase:^(FMDatabase *db) { FMResultSet *cursor = [db executeQuery:@"SELECT * FROM unread_counts"]; @@ -2647,7 +2659,7 @@ - (void)populateDictUnreadCounts { - (NSInteger)unreadCount { if (storiesCollection.isRiverView || storiesCollection.isSocialRiverView) { return [self unreadCountForFolder:nil]; - } else { + } else { return [self unreadCountForFeed:nil]; } } @@ -2666,17 +2678,17 @@ - (NSInteger)allUnreadCount { NSDictionary *feed = [self.dictUnreadCounts objectForKey:key]; total += [[feed objectForKey:@"ps"] intValue]; total += [[feed objectForKey:@"nt"] intValue]; -// NSLog(@"feed title and number is %@ %i", [feed objectForKey:@"feed_title"], ([[feed objectForKey:@"ps"] intValue] + [[feed objectForKey:@"nt"] intValue])); -// NSLog(@"total is %i", total); + // NSLog(@"feed title and number is %@ %i", [feed objectForKey:@"feed_title"], ([[feed objectForKey:@"ps"] intValue] + [[feed objectForKey:@"nt"] intValue])); + // NSLog(@"total is %i", total); } - + return total; } - (NSInteger)unreadCountForFeed:(NSString *)feedId { NSInteger total = 0; NSDictionary *feed; - + if (feedId) { NSString *feedIdStr = [NSString stringWithFormat:@"%@",feedId]; if ([feedIdStr containsString:@"social:"]) { @@ -2684,7 +2696,7 @@ - (NSInteger)unreadCountForFeed:(NSString *)feedId { } else { feed = [self.dictUnreadCounts objectForKey:feedIdStr]; } - + } else { NSString *feedIdStr = [NSString stringWithFormat:@"%@", [storiesCollection.activeFeed objectForKey:@"id"]]; feed = [self.dictUnreadCounts objectForKey:feedIdStr]; @@ -2731,7 +2743,7 @@ - (NSInteger)unreadCountForFolder:(NSString *)folderName { } else { folder = [self.dictFolders objectForKey:folderName]; } - + for (id feedId in folder) { total += [self unreadCountForFeed:feedId]; } @@ -2782,7 +2794,7 @@ - (UnreadCounts *)splitUnreadCountForFolder:(NSString *)folderName { [counts addCounts:[self splitUnreadCountForFeed:feedId]]; } } else if ([folderName isEqual:@"river_global"] || - (!folderName && [storiesCollection.activeFolder isEqual:@"river_global"])) { + (!folderName && [storiesCollection.activeFolder isEqual:@"river_global"])) { // Nothing for global } else if ([folderName isEqual:@"everything"] || [folderName isEqual:@"infrequent"] || @@ -2820,7 +2832,7 @@ - (UnreadCounts *)splitUnreadCountForFolder:(NSString *)folderName { [self.folderCountCache setObject:[NSNumber numberWithInt:counts.ps] forKey:[NSString stringWithFormat:@"%@-ps", folderName]]; [self.folderCountCache setObject:[NSNumber numberWithInt:counts.nt] forKey:[NSString stringWithFormat:@"%@-nt", folderName]]; [self.folderCountCache setObject:[NSNumber numberWithInt:counts.ng] forKey:[NSString stringWithFormat:@"%@-ng", folderName]]; - + return counts; } @@ -2869,7 +2881,7 @@ - (NSDictionary *)markVisibleStoriesRead { NSMutableArray *stories = [feedsStories objectForKey:feedIdStr]; [stories addObject:[story objectForKey:@"story_hash"]]; [storiesCollection markStoryRead:story feed:feed]; - } + } return feedsStories; } @@ -2881,7 +2893,7 @@ - (void)markActiveFolderAllRead { for (NSString 
*folderName in self.dictFoldersArray) { for (id feedId in [self.dictFolders objectForKey:folderName]) { [self markFeedAllRead:feedId]; - } + } } } else { for (id feedId in [self.dictFolders objectForKey:storiesCollection.activeFolder]) { @@ -3021,7 +3033,7 @@ - (void)markStoryAsStarred:(NSString *)storyHash withCallback:(void(^)(void))cal - (void)markStoriesRead:(NSDictionary *)stories inFeeds:(NSArray *)feeds cutoffTimestamp:(NSInteger)cutoff { // Must be offline and marking all as read, so load all stories. - + if (stories && [[stories allKeys] count]) { [self queueReadStories:stories]; } @@ -3140,7 +3152,7 @@ - (NSInteger)adjustSavedStoryCount:(NSString *)tagName direction:(NSInteger)dire if (!newTag) { newTag = [@{@"ps": [NSNumber numberWithInt:0], @"feed_title": tagName - } mutableCopy]; + } mutableCopy]; } NSInteger newCount = [[newTag objectForKey:@"ps"] integerValue] + direction; [newTag setObject:[NSNumber numberWithInteger:newCount] forKey:@"ps"]; @@ -3201,11 +3213,11 @@ - (NSArray *)updateStarredStoryCounts:(NSDictionary *)results { [savedStories addObject:savedTagId]; [savedStoryDict setObject:savedTag forKey:savedTagId]; [self.dictUnreadCounts setObject:@{@"ps": [userTag objectForKey:@"count"], - @"nt": [NSNumber numberWithInt:0], - @"ng": [NSNumber numberWithInt:0]} - forKey:savedTagId]; + @"nt": [NSNumber numberWithInt:0], + @"ng": [NSNumber numberWithInt:0]} + forKey:savedTagId]; } - + self.dictSavedStoryTags = savedStoryDict; self.dictSavedStoryFeedCounts = savedStoryFeedCounts; @@ -3308,7 +3320,11 @@ - (void)showPopoverWithViewController:(UIViewController *)viewController content [self showPopoverWithViewController:viewController contentSize:contentSize barButtonItem:nil sourceView:sourceView sourceRect:sourceRect permittedArrowDirections:permittedArrowDirections]; } -- (void)showPopoverWithViewController:(UIViewController *)viewController contentSize:(CGSize)contentSize barButtonItem:(UIBarButtonItem *)barButtonItem sourceView:(UIView *)sourceView sourceRect:(CGRect)sourceRect permittedArrowDirections:(UIPopoverArrowDirection)permittedArrowDirections { +//- (void)showPopoverWithViewController:(UIViewController *)viewController contentSize:(CGSize)contentSize barButtonItem:(UIBarButtonItem *)barButtonItem sourceView:(UIView *)sourceView sourceRect:(CGRect)sourceRect permittedArrowDirections:(UIPopoverArrowDirection)permittedArrowDirections { +// [self showPopoverWithViewController:viewController contentSize:contentSize sourceNavigationController:self.navigationControllerForPopover barButtonItem:barButtonItem sourceView:sourceView sourceRect:sourceRect permittedArrowDirections:permittedArrowDirections]; +//} + +- (void)showPopoverWithViewController:(UIViewController *)viewController contentSize:(CGSize)contentSize /*sourceNavigationController:(UINavigationController *)sourceNavigationController*/ barButtonItem:(UIBarButtonItem *)barButtonItem sourceView:(UIView *)sourceView sourceRect:(CGRect)sourceRect permittedArrowDirections:(UIPopoverArrowDirection)permittedArrowDirections { if (viewController == self.navigationControllerForPopover.presentedViewController) { return; // nothing to do, already showing this controller } @@ -3371,7 +3387,11 @@ - (void)hidePopover { } - (UINavigationController *)navigationControllerForPopover { +#if TARGET_OS_MACCATALYST + return self.storyPagesViewController.navigationController ?: self.feedsNavigationController; +#else return self.feedsNavigationController; +#endif } #pragma mark - diff --git 
a/clients/ios/Classes/StoryPagesObjCViewController.m b/clients/ios/Classes/StoryPagesObjCViewController.m index a49ce3d0d1..2810dc0a2b 100644 --- a/clients/ios/Classes/StoryPagesObjCViewController.m +++ b/clients/ios/Classes/StoryPagesObjCViewController.m @@ -172,13 +172,13 @@ - (void)viewDidLoad { [separatorBarButton setEnabled:NO]; separatorBarButton.isAccessibilityElement = NO; - UIImage *settingsImage = [Utilities imageNamed:@"settings" sized:30]; + UIImage *settingsImage = [Utilities imageNamed:@"settings" sized:self.isMac ? 24 : 30]; fontSettingsButton = [UIBarButtonItem barItemWithImage:settingsImage target:self action:@selector(toggleFontSize:)]; fontSettingsButton.accessibilityLabel = @"Story settings"; - UIImage *markreadImage = [UIImage imageNamed:@"original_button.png"]; + UIImage *markreadImage = [Utilities imageNamed:@"original_button.png" sized:self.isMac ? 24 : 30]; originalStoryButton = [UIBarButtonItem barItemWithImage:markreadImage target:self action:@selector(showOriginalSubview:)]; @@ -1513,6 +1513,7 @@ - (IBAction)toggleFontSize:(id)sender { UINavigationController *fontSettingsNavigationController = appDelegate.fontSettingsNavigationController; [fontSettingsNavigationController popToRootViewControllerAnimated:NO]; +// [appDelegate showPopoverWithViewController:fontSettingsNavigationController contentSize:CGSizeZero sourceNavigationController:self.navigationController barButtonItem:self.fontSettingsButton sourceView:nil sourceRect:CGRectZero permittedArrowDirections:UIPopoverArrowDirectionAny]; [appDelegate showPopoverWithViewController:fontSettingsNavigationController contentSize:CGSizeZero barButtonItem:self.fontSettingsButton]; } diff --git a/clients/ios/NewsBlur.xcodeproj/project.pbxproj b/clients/ios/NewsBlur.xcodeproj/project.pbxproj index d784e940ec..35052ea1f2 100755 --- a/clients/ios/NewsBlur.xcodeproj/project.pbxproj +++ b/clients/ios/NewsBlur.xcodeproj/project.pbxproj @@ -3849,7 +3849,7 @@ attributes = { BuildIndependentTargetsInParallel = YES; LastSwiftUpdateCheck = 1120; - LastUpgradeCheck = 1500; + LastUpgradeCheck = 1510; ORGANIZATIONNAME = NewsBlur; TargetAttributes = { 173CB30C26BCE94700BA872A = { @@ -5611,7 +5611,6 @@ CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; COPY_PHASE_STRIP = NO; - CURRENT_PROJECT_VERSION = 152; DEVELOPMENT_TEAM = HR7P97SD72; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", @@ -5663,7 +5662,6 @@ CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; COPY_PHASE_STRIP = YES; - CURRENT_PROJECT_VERSION = 152; DEVELOPMENT_TEAM = HR7P97SD72; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", @@ -5902,7 +5900,6 @@ CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; COPY_PHASE_STRIP = NO; - CURRENT_PROJECT_VERSION = 152; DEVELOPMENT_TEAM = HR7P97SD72; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", @@ -5951,7 +5948,6 @@ CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; COPY_PHASE_STRIP = YES; - CURRENT_PROJECT_VERSION = 152; DEVELOPMENT_TEAM = HR7P97SD72; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", diff --git a/clients/ios/NewsBlur.xcodeproj/xcshareddata/xcschemes/Alpha Widget Extension.xcscheme b/clients/ios/NewsBlur.xcodeproj/xcshareddata/xcschemes/Alpha Widget Extension.xcscheme index 1a6ded7639..786a1accc6 100644 --- a/clients/ios/NewsBlur.xcodeproj/xcshareddata/xcschemes/Alpha Widget Extension.xcscheme +++ 
b/clients/ios/NewsBlur.xcodeproj/xcshareddata/xcschemes/Alpha Widget Extension.xcscheme
@@ -1,6 +1,6 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <Scheme
-   LastUpgradeVersion = "1500"
+   LastUpgradeVersion = "1510"
    version = "1.3">
diff --git a/clients/ios/Resources/mac/Settings.bundle/Root.plist b/clients/ios/Resources/mac/Settings.bundle/Root.plist
--- a/clients/ios/Resources/mac/Settings.bundle/Root.plist
+++ b/clients/ios/Resources/mac/Settings.bundle/Root.plist
 	<key>Title</key>
 	<string>Story List</string>
 	<key>File</key>
-	<string>Story List</string>
+	<string>StoryList</string>
 	<key>Type</key>
diff --git a/clients/ios/Resources/mac/Settings.bundle/Story List.plist b/clients/ios/Resources/mac/Settings.bundle/StoryList.plist
similarity index 99%
rename from clients/ios/Resources/mac/Settings.bundle/Story List.plist
rename to clients/ios/Resources/mac/Settings.bundle/StoryList.plist
index 086f451689..da498cfbce 100644
--- a/clients/ios/Resources/mac/Settings.bundle/Story List.plist
+++ b/clients/ios/Resources/mac/Settings.bundle/StoryList.plist
@@ -316,6 +316,6 @@
 	<key>Icon</key>
-	<string>Story List Template</string>
+	<string>StoryListTemplate</string>
diff --git a/clients/ios/Resources/mac/Settings.bundle/Story List Template.png b/clients/ios/Resources/mac/Settings.bundle/StoryListTemplate.png
similarity index 59%
rename from clients/ios/Resources/mac/Settings.bundle/Story List Template.png
rename to clients/ios/Resources/mac/Settings.bundle/StoryListTemplate.png
index 3c91f796208e705294fd1208a23e572e0d34626c..59720a5e6f5b46ad354c9032e80eb8b9bdc82b1a 100644
Binary files a/clients/ios/Resources/mac/Settings.bundle/Story List Template.png and b/clients/ios/Resources/mac/Settings.bundle/StoryListTemplate.png differ
diff --git a/clients/ios/Resources/original_button.png b/clients/ios/Resources/original_button.png
index 966429fa520ed7adfccea222e54fed08870b5f8b..500f82120a56fb370bf957e4df145a8598f4f2bb 100644
Binary files a/clients/ios/Resources/original_button.png and b/clients/ios/Resources/original_button.png differ
diff --git a/clients/ios/Resources/original_button@2x.png b/clients/ios/Resources/original_button@2x.png
index 49bf0cc1e54e927b01a75e88735fdca739098a9f..2e6e610ae78758160d6071de066989b49646b62d 100644
Binary files a/clients/ios/Resources/original_button@2x.png and b/clients/ios/Resources/original_button@2x.png differ

From: David Sinclair
Date: Tue, 12 Dec 2023 20:24:38 -0600
Subject: [PATCH 10/69] #1247 (Mac Catalyst edition)

- Added Mute, Organize, Widget Sites, and other commands to the File menu.
- Added Columns, Text Size, and Spacing commands to the View menu.
- Investigated nav bar theming; it appears to be outside the scope of the window.
- Investigated menu disabling; still a work in progress (one possible approach is sketched below).
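One avenue for the menu-disabling work, still unproven here: Catalyst validates
main-menu items through the responder chain, so the owning view controller can
veto a command in canPerformAction:withSender:. The following is a minimal
sketch, not what this patch implements. It assumes the storyboard commands stay
wired to the IBActions added in this patch (deleteSite: is the placeholder
action below), and storiesCollection.activeFeed stands in for "a feed is
currently selected"; that accessor is an assumption, not verified wiring.

    // Minimal sketch (not in this patch): gray out site-level menu commands
    // in FeedDetailObjCViewController until a feed is actually selected.
    - (BOOL)canPerformAction:(SEL)action withSender:(id)sender {
        if (action == @selector(deleteSite:)) {
            // Assumed accessor for the currently active feed.
            return self.storiesCollection.activeFeed != nil;
        }
        
        return [super canPerformAction:action withSender:sender];
    }

Returning NO disables the menu item without storyboard changes; the
commented-out canPerformAction: overrides in the diff below log the same
callback while exploring this.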
---
 .../Classes/FeedDetailObjCViewController.m    |  14 ++
 clients/ios/Classes/FeedsObjCViewController.h |  13 ++
 clients/ios/Classes/FeedsObjCViewController.m |  70 +++++++++
 clients/ios/Classes/NewsBlurAppDelegate.h     |   1 -
 clients/ios/Classes/NewsBlurAppDelegate.m     |   6 +-
 .../ios/Resources/MainInterface.storyboard    | 146 ++++++++++++++++++
 6 files changed, 244 insertions(+), 6 deletions(-)

diff --git a/clients/ios/Classes/FeedDetailObjCViewController.m b/clients/ios/Classes/FeedDetailObjCViewController.m
index 26a679c11c..cf70d2e5ed 100644
--- a/clients/ios/Classes/FeedDetailObjCViewController.m
+++ b/clients/ios/Classes/FeedDetailObjCViewController.m
@@ -3125,6 +3125,16 @@ - (void)updateTheme {
     [self reload];
 }
 
+//- (BOOL)canPerformAction:(SEL)action withSender:(id)sender {
+//    NSLog(@"canPerformAction: %@ withSender: %@", NSStringFromSelector(action), sender); // log
+//
+//    if (action == @selector(deleteSite:)) {
+//        return NO;
+//    }
+//
+//    return YES;
+//}
+
 #pragma mark -
 #pragma mark Story Actions - save
 
@@ -3184,6 +3194,10 @@ - (IBAction)instafetchFeed {
     }
 }
 
+- (IBAction)deleteSite:(id)sender {
+    //TODO
+}
+
 #pragma mark -
 #pragma mark PullToRefresh
 
diff --git a/clients/ios/Classes/FeedsObjCViewController.h b/clients/ios/Classes/FeedsObjCViewController.h
index 1cc428287c..7640d5344c 100644
--- a/clients/ios/Classes/FeedsObjCViewController.h
+++ b/clients/ios/Classes/FeedsObjCViewController.h
@@ -99,9 +99,22 @@ UIGestureRecognizerDelegate, UISearchBarDelegate> {
 - (void)didSelectSectionHeader:(UIButton *)button;
 - (void)didSelectSectionHeaderWithTag:(NSInteger)tag;
 - (void)selectNextFolderOrFeed;
+
 - (IBAction)reloadFeeds:(id)sender;
 - (IBAction)selectIntelligence;
+- (IBAction)showMuteSites:(id)sender;
+- (IBAction)showOrganizeSites:(id)sender;
+- (IBAction)showWidgetSites:(id)sender;
+- (IBAction)showNotifications:(id)sender;
+- (IBAction)showFindFriends:(id)sender;
+- (IBAction)showPremium:(id)sender;
+- (IBAction)showSupportForum:(id)sender;
+- (IBAction)showLogout:(id)sender;
+- (IBAction)chooseColumns:(id)sender;
+- (IBAction)chooseFontSize:(id)sender;
+- (IBAction)chooseSpacing:(id)sender;
 - (IBAction)chooseTheme:(id)sender;
+
 - (void)markFeedRead:(NSString *)feedId cutoffDays:(NSInteger)days;
 - (void)markFeedsRead:(NSArray *)feedIds cutoffDays:(NSInteger)days;
 - (void)markEverythingReadWithDays:(NSInteger)days;
diff --git a/clients/ios/Classes/FeedsObjCViewController.m b/clients/ios/Classes/FeedsObjCViewController.m
index fd13e48705..a1ef815ffc 100644
--- a/clients/ios/Classes/FeedsObjCViewController.m
+++ b/clients/ios/Classes/FeedsObjCViewController.m
@@ -2864,6 +2864,76 @@ - (void)finishRefreshingFeedList:(NSDictionary *)results feedId:(NSString *)feed
     });
 }
 
+//- (BOOL)canPerformAction:(SEL)action withSender:(id)sender {
+//    NSLog(@"canPerformAction: %@ withSender: %@", NSStringFromSelector(action), sender); // log
+//    return YES;
+//}
+
+- (IBAction)showMuteSites:(id)sender {
+    [self.appDelegate showMuteSites];
+}
+
+- (IBAction)showOrganizeSites:(id)sender {
+    [self.appDelegate showOrganizeSites];
+}
+
+- (IBAction)showWidgetSites:(id)sender {
+    [self.appDelegate showWidgetSites];
+}
+
+- (IBAction)showNotifications:(id)sender {
+    [self.appDelegate openNotificationsWithFeed:nil];
+}
+
+- (IBAction)showFindFriends:(id)sender {
+    [self.appDelegate showFindFriends];
+}
+
+- (IBAction)showPremium:(id)sender {
+    [self.appDelegate showPremiumDialog];
+}
+
+- (IBAction)showSupportForum:(id)sender {
+    NSURL *url = [NSURL URLWithString:@"https://forum.newsblur.com"];
+    [[UIApplication 
sharedApplication] openURL:url options:@{} completionHandler:nil]; +} + +- (IBAction)showLogout:(id)sender { + [self.appDelegate confirmLogout]; +} + +- (IBAction)chooseColumns:(id)sender { + UICommand *command = sender; + NSString *string = command.propertyList; + + [[NSUserDefaults standardUserDefaults] setObject:string forKey:@"split_behavior"]; + + [UIView animateWithDuration:0.5 animations:^{ + [self.appDelegate updateSplitBehavior:YES]; + }]; + + [self.appDelegate.detailViewController updateLayoutWithReload:NO fetchFeeds:YES]; +} + +- (IBAction)chooseFontSize:(id)sender { + UICommand *command = sender; + NSString *string = command.propertyList; + + [[NSUserDefaults standardUserDefaults] setObject:string forKey:@"feed_list_font_size"]; + + [self.appDelegate resizeFontSize]; +} + +- (IBAction)chooseSpacing:(id)sender { + UICommand *command = sender; + NSString *string = command.propertyList; + + [[NSUserDefaults standardUserDefaults] setObject:string forKey:@"feed_list_spacing"]; + + [self reloadFeedTitlesTable]; + [self.appDelegate.feedDetailViewController reloadWithSizing]; +} + - (IBAction)chooseTheme:(id)sender { UICommand *command = sender; NSString *string = command.propertyList; diff --git a/clients/ios/Classes/NewsBlurAppDelegate.h b/clients/ios/Classes/NewsBlurAppDelegate.h index 4bb2de785b..9aefe16a9d 100644 --- a/clients/ios/Classes/NewsBlurAppDelegate.h +++ b/clients/ios/Classes/NewsBlurAppDelegate.h @@ -445,7 +445,6 @@ SFSafariViewControllerDelegate> { - (void)showPopoverWithViewController:(UIViewController *)viewController contentSize:(CGSize)contentSize barButtonItem:(UIBarButtonItem *)barButtonItem; - (void)showPopoverWithViewController:(UIViewController *)viewController contentSize:(CGSize)contentSize sourceView:(UIView *)sourceView sourceRect:(CGRect)sourceRect; - (void)showPopoverWithViewController:(UIViewController *)viewController contentSize:(CGSize)contentSize sourceView:(UIView *)sourceView sourceRect:(CGRect)sourceRect permittedArrowDirections:(UIPopoverArrowDirection)permittedArrowDirections; -//- (void)showPopoverWithViewController:(UIViewController *)viewController contentSize:(CGSize)contentSize sourceNavigationController:(UINavigationController *)sourceNavigationController barButtonItem:(UIBarButtonItem *)barButtonItem sourceView:(UIView *)sourceView sourceRect:(CGRect)sourceRect permittedArrowDirections:(UIPopoverArrowDirection)permittedArrowDirections; - (void)hidePopoverAnimated:(BOOL)animated completion:(void (^)(void))completion; - (BOOL)hidePopoverAnimated:(BOOL)animated; diff --git a/clients/ios/Classes/NewsBlurAppDelegate.m b/clients/ios/Classes/NewsBlurAppDelegate.m index 404bec0333..0b99dafb1b 100644 --- a/clients/ios/Classes/NewsBlurAppDelegate.m +++ b/clients/ios/Classes/NewsBlurAppDelegate.m @@ -3320,11 +3320,7 @@ - (void)showPopoverWithViewController:(UIViewController *)viewController content [self showPopoverWithViewController:viewController contentSize:contentSize barButtonItem:nil sourceView:sourceView sourceRect:sourceRect permittedArrowDirections:permittedArrowDirections]; } -//- (void)showPopoverWithViewController:(UIViewController *)viewController contentSize:(CGSize)contentSize barButtonItem:(UIBarButtonItem *)barButtonItem sourceView:(UIView *)sourceView sourceRect:(CGRect)sourceRect permittedArrowDirections:(UIPopoverArrowDirection)permittedArrowDirections { -// [self showPopoverWithViewController:viewController contentSize:contentSize sourceNavigationController:self.navigationControllerForPopover barButtonItem:barButtonItem 
sourceView:sourceView sourceRect:sourceRect permittedArrowDirections:permittedArrowDirections]; -//} - -- (void)showPopoverWithViewController:(UIViewController *)viewController contentSize:(CGSize)contentSize /*sourceNavigationController:(UINavigationController *)sourceNavigationController*/ barButtonItem:(UIBarButtonItem *)barButtonItem sourceView:(UIView *)sourceView sourceRect:(CGRect)sourceRect permittedArrowDirections:(UIPopoverArrowDirection)permittedArrowDirections { +- (void)showPopoverWithViewController:(UIViewController *)viewController contentSize:(CGSize)contentSize barButtonItem:(UIBarButtonItem *)barButtonItem sourceView:(UIView *)sourceView sourceRect:(CGRect)sourceRect permittedArrowDirections:(UIPopoverArrowDirection)permittedArrowDirections { if (viewController == self.navigationControllerForPopover.presentedViewController) { return; // nothing to do, already showing this controller } diff --git a/clients/ios/Resources/MainInterface.storyboard b/clients/ios/Resources/MainInterface.storyboard index f7dae1270f..1bdbd68169 100644 --- a/clients/ios/Resources/MainInterface.storyboard +++ b/clients/ios/Resources/MainInterface.storyboard @@ -271,6 +271,67 @@
+ [storyboard XML for the new menu commands lost in extraction; not recoverable] @@ -282,6 +343,82 @@ + [storyboard XML lost in extraction; not recoverable] @@ -401,6 +538,15 @@ + [storyboard XML lost in extraction; not recoverable] From 8dac36651c1cc88c11a483487d67c6e4be028155 Mon Sep 17 00:00:00 2001 From: David Sinclair Date: Thu, 4 Jan 2024 14:19:19 -0500 Subject: [PATCH 11/69] #1247 (Mac Catalyst edition) - Moved the user profile button to above the feeds list, since it was too cramped in the toolbar (and more like on the web). --- clients/ios/Classes/FeedsObjCViewController.h | 3 + clients/ios/Classes/FeedsObjCViewController.m | 66 +++++++++++-------- clients/ios/Classes/NewsBlurAppDelegate.m | 2 +- .../ios/Resources/MainInterface.storyboard | 1 + 4 files changed, 43 insertions(+), 29 deletions(-) diff --git a/clients/ios/Classes/FeedsObjCViewController.h b/clients/ios/Classes/FeedsObjCViewController.h index 7640d5344c..af8f7215c1 100644 --- a/clients/ios/Classes/FeedsObjCViewController.h +++ b/clients/ios/Classes/FeedsObjCViewController.h @@ -56,6 +56,7 @@ UIGestureRecognizerDelegate, UISearchBarDelegate> { @property (nonatomic) IBOutlet NewsBlurAppDelegate *appDelegate; @property (nonatomic) IBOutlet UIView *innerView; @property (nonatomic) IBOutlet UITableView *feedTitlesTable; +@property (nonatomic) IBOutlet NSLayoutConstraint *feedTitlesTopConstraint; @property (nonatomic) IBOutlet UIToolbar *feedViewToolbar; @property (nonatomic) IBOutlet UISlider * feedScoreSlider; @property (nonatomic) IBOutlet UIBarButtonItem * homeButton; @@ -63,8 +64,10 @@ UIGestureRecognizerDelegate, UISearchBarDelegate> { @property (nonatomic) IBOutlet UIBarButtonItem * settingsBarButton; @property (nonatomic) IBOutlet UIBarButtonItem * activitiesButton; #if TARGET_OS_MACCATALYST +@property (nonatomic) IBOutlet UIBarButtonItem * spacerBarButton; @property (nonatomic) IBOutlet UIBarButtonItem * userBarButton; #endif +@property (nonatomic) IBOutlet UIView *userInfoView; @property (nonatomic) IBOutlet UIButton *userAvatarButton; @property (nonatomic) IBOutlet UILabel *neutralCount; @property (nonatomic) IBOutlet UILabel *positiveCount; diff --git a/clients/ios/Classes/FeedsObjCViewController.m b/clients/ios/Classes/FeedsObjCViewController.m index a1ef815ffc..86dc576c35 100644 --- a/clients/ios/Classes/FeedsObjCViewController.m +++ b/clients/ios/Classes/FeedsObjCViewController.m @@ -1060,7 +1060,7 @@ - (void)showUserProfile { appDelegate.activeUserProfileName = [NSString stringWithFormat:@"%@", [appDelegate.dictSocialProfile objectForKey:@"username"]]; // appDelegate.activeUserProfileName = @"You"; #if TARGET_OS_MACCATALYST - [appDelegate showUserProfileModal:self.userBarButton]; + [appDelegate showUserProfileModal:self.userAvatarButton]; #else [appDelegate showUserProfileModal:self.navigationItem.titleView]; #endif @@ -2949,7 +2949,14 @@ - (void)resetToolbar { - (void)layoutHeaderCounts:(UIInterfaceOrientation)orientation { #if TARGET_OS_MACCATALYST - int yOffset = -5; + int xOffset = 60; + int yOffset = 10; + + [self.userInfoView removeFromSuperview]; + + self.userInfoView = [[UIView alloc] + initWithFrame:CGRectMake(0, 0, self.view.bounds.size.width, 50)]; + self.userInfoView.backgroundColor = UIColorFromLightSepiaMediumDarkRGB(0xE0E0E0, 0xFFF8CA, 0x4F4F4F, 0x292B2C); #else if (!orientation) { orientation = self.view.window.windowScene.interfaceOrientation; } @@ -2961,26 +2968,26 @@ - 
(void)layoutHeaderCounts:(UIInterfaceOrientation)orientation { isShort = YES; } + int xOffset = 50; int yOffset = isShort ? 0 : 6; + + self.userInfoView = [[UIView alloc] + initWithFrame:CGRectMake(0, 0, + self.navigationController.navigationBar.frame.size.width, + self.navigationController.navigationBar.frame.size.height)]; #endif - UIView *userInfoView = [[UIView alloc] - initWithFrame:CGRectMake(0, 0, - self.navigationController.navigationBar.frame.size.width, - self.navigationController.navigationBar.frame.size.height)]; + // adding user avatar to left NSURL *imageURL = [NSURL URLWithString:[NSString stringWithFormat:@"%@", [appDelegate.dictSocialProfile objectForKey:@"large_photo_url"]]]; userAvatarButton = [UIButton systemButtonWithImage:[UIImage imageNamed:@"user"] - target:self action:@selector((showUserProfile))]; + target:self action:@selector(showUserProfile)]; userAvatarButton.pointerInteractionEnabled = YES; userAvatarButton.accessibilityLabel = @"User info"; #if TARGET_OS_MACCATALYST userAvatarButton.accessibilityHint = @"Double-click for information about your account."; CGRect frame = userAvatarButton.frame; - frame.origin.y = -8; - frame.size.width = 38; - frame.size.height = 38; userAvatarButton.frame = frame; #else userAvatarButton.accessibilityHint = @"Double-tap for information about your account."; @@ -3000,55 +3007,52 @@ - (void)layoutHeaderCounts:(UIInterfaceOrientation)orientation { image = [image imageWithRenderingMode:UIImageRenderingModeAlwaysOriginal]; UIButton *button = strongSelf.userAvatarButton; [button setImage:image forState:UIControlStateNormal]; -#if TARGET_OS_MACCATALYST - strongSelf.appDelegate.feedDetailViewController.navigationItem.leftBarButtonItems = @[[[UIBarButtonItem alloc] initWithCustomView:[UIView new]]]; -#endif } failure:^(NSURLRequest * _Nonnull request, NSHTTPURLResponse * _Nonnull response, NSError * _Nonnull error) { NSLog(@"Could not fetch user avatar: %@", error); }]; - [userInfoView addSubview:userAvatarButton]; + [self.userInfoView addSubview:userAvatarButton]; - userLabel = [[UILabel alloc] initWithFrame:CGRectMake(50, yOffset, userInfoView.frame.size.width, 16)]; + userLabel = [[UILabel alloc] initWithFrame:CGRectMake(xOffset, yOffset, self.userInfoView.frame.size.width, 16)]; userLabel.text = appDelegate.activeUsername; userLabel.font = userLabelFont; userLabel.textColor = UIColorFromRGB(0x404040); userLabel.backgroundColor = [UIColor clearColor]; userLabel.accessibilityLabel = [NSString stringWithFormat:@"Logged in as %@", appDelegate.activeUsername]; [userLabel sizeToFit]; - [userInfoView addSubview:userLabel]; + [self.userInfoView addSubview:userLabel]; [appDelegate.folderCountCache removeObjectForKey:@"everything"]; yellowIcon = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"g_icn_unread"]]; - [userInfoView addSubview:yellowIcon]; + [self.userInfoView addSubview:yellowIcon]; yellowIcon.hidden = YES; neutralCount = [[UILabel alloc] init]; neutralCount.font = [UIFont fontWithName:@"WhitneySSm-Book" size:12]; neutralCount.textColor = UIColorFromRGB(0x707070); neutralCount.backgroundColor = [UIColor clearColor]; - [userInfoView addSubview:neutralCount]; + [self.userInfoView addSubview:neutralCount]; greenIcon = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"g_icn_focus"]]; - [userInfoView addSubview:greenIcon]; + [self.userInfoView addSubview:greenIcon]; greenIcon.hidden = YES; positiveCount = [[UILabel alloc] init]; positiveCount.font = [UIFont fontWithName:@"WhitneySSm-Book" size:12]; 
positiveCount.textColor = UIColorFromRGB(0x707070); positiveCount.backgroundColor = [UIColor clearColor]; - [userInfoView addSubview:positiveCount]; + [self.userInfoView addSubview:positiveCount]; - [userInfoView sizeToFit]; + [self.userInfoView sizeToFit]; -// userInfoView.backgroundColor = UIColor.blueColor; +// self.userInfoView.backgroundColor = UIColor.blueColor; #if TARGET_OS_MACCATALYST - self.userBarButton = [[UIBarButtonItem alloc] initWithCustomView:userInfoView]; -// userInfoView.backgroundColor = UIColor.redColor; - self.navigationItem.leftBarButtonItem = self.userBarButton; + [self.innerView addSubview:self.userInfoView]; + + self.feedTitlesTopConstraint.constant = 50; #else - self.navigationItem.titleView = userInfoView; + self.navigationItem.titleView = self.userInfoView; #endif } @@ -3058,6 +3062,12 @@ - (void)refreshHeaderCounts { return; } +#if TARGET_OS_MACCATALYST + int yOffset = 2; +#else + int yOffset = 0; +#endif + userAvatarButton.hidden = NO; [appDelegate.folderCountCache removeObjectForKey:@"everything"]; @@ -3074,13 +3084,13 @@ - (void)refreshHeaderCounts { yellowIcon.frame = CGRectMake(CGRectGetMinX(userLabel.frame), CGRectGetMaxY(userLabel.frame) + 4, 8, 8); neutralCount.frame = CGRectMake(CGRectGetMaxX(yellowIcon.frame) + 2, - CGRectGetMinY(yellowIcon.frame) - 2, 100, 16); + CGRectGetMinY(yellowIcon.frame) - 2 - yOffset, 100, 16); [neutralCount sizeToFit]; greenIcon.frame = CGRectMake(CGRectGetMaxX(neutralCount.frame) + 8, CGRectGetMinY(yellowIcon.frame), 8, 8); positiveCount.frame = CGRectMake(CGRectGetMaxX(greenIcon.frame) + 2, - CGRectGetMinY(greenIcon.frame) - 2, 100, 16); + CGRectGetMinY(greenIcon.frame) - 2 - yOffset, 100, 16); [positiveCount sizeToFit]; yellowIcon.hidden = NO; diff --git a/clients/ios/Classes/NewsBlurAppDelegate.m b/clients/ios/Classes/NewsBlurAppDelegate.m index 0b99dafb1b..9c4bb31150 100644 --- a/clients/ios/Classes/NewsBlurAppDelegate.m +++ b/clients/ios/Classes/NewsBlurAppDelegate.m @@ -3297,7 +3297,7 @@ - (void)showPopoverWithViewController:(UIViewController *)viewController content UITableViewCell *cell = (UITableViewCell *)sender; [self showPopoverWithViewController:viewController contentSize:contentSize sourceView:cell sourceRect:cell.bounds]; - } else if ([sender class] == [UIBarButtonItem class]) { + } else if ([sender class] == [UIBarButtonItem class] || [sender class] == [UIButton class]) { [self showPopoverWithViewController:viewController contentSize:contentSize barButtonItem:sender]; } else if ([sender class] == [UIView class]) { [self showPopoverWithViewController:viewController contentSize:contentSize sourceView:sender sourceRect:[sender frame]]; diff --git a/clients/ios/Resources/MainInterface.storyboard b/clients/ios/Resources/MainInterface.storyboard index 1bdbd68169..b0a2332666 100644 --- a/clients/ios/Resources/MainInterface.storyboard +++ b/clients/ios/Resources/MainInterface.storyboard @@ -228,6 +228,7 @@ + [storyboard XML lost in extraction; not recoverable] From c94e517a8835ddf10598651855b17ad5666bb414 Mon Sep 17 00:00:00 2001 From: David Sinclair Date: Fri, 5 Jan 2024 22:02:43 -0500 Subject: [PATCH 12/69] #1247 (Mac Catalyst edition) - Moved globally-relevant methods to BaseViewController, so their menu items are always available. - Big refactor to eliminate redundant appDelegate properties. - Work in progress on reimplementing the navigation bars as a Mac toolbar.
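Note: UIKit validates main-menu commands by walking the responder chain, so moving these IBActions into BaseViewController keeps their menu items enabled no matter which controller currently has focus. Individual actions can still be vetoed in canPerformAction:withSender:, as the commented-out stubs in this series suggest. A minimal Swift sketch, where the subclass name and the isOffline flag are hypothetical:

    import UIKit

    // Sketch only: veto one command while letting the rest resolve normally.
    class ValidatingSplitViewController: UISplitViewController {
        var isOffline = false  // placeholder state for the example

        override func canPerformAction(_ action: Selector, withSender sender: Any?) -> Bool {
            // Disable Reload Sites while offline; all other actions fall
            // through to the default responder-chain lookup.
            if action == #selector(BaseViewController.reloadFeeds(_:)) {
                return !isOffline
            }
            return super.canPerformAction(action, withSender: sender)
        }
    }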
--- clients/ios/Classes/AddSiteViewController.h | 7 +- clients/ios/Classes/AddSiteViewController.m | 1 - clients/ios/Classes/BaseViewController.h | 22 ++++- clients/ios/Classes/BaseViewController.m | 94 ++++++++++++++++++- .../ios/Classes/DetailViewController.swift | 5 - .../ios/Classes/FeedChooserViewController.h | 4 +- .../ios/Classes/FeedChooserViewController.m | 3 - .../Classes/FeedDetailObjCViewController.h | 4 - .../Classes/FeedDetailObjCViewController.m | 12 ++- clients/ios/Classes/FeedsObjCViewController.h | 18 ---- clients/ios/Classes/FeedsObjCViewController.m | 80 +--------------- .../FirstTimeUserAddFriendsViewController.h | 5 +- .../FirstTimeUserAddFriendsViewController.m | 3 - .../FirstTimeUserAddNewsBlurViewController.h | 5 +- .../FirstTimeUserAddNewsBlurViewController.m | 3 - .../FirstTimeUserAddSitesViewController.h | 5 +- .../FirstTimeUserAddSitesViewController.m | 3 - .../ios/Classes/FriendsListViewController.h | 2 - .../ios/Classes/FriendsListViewController.m | 3 - clients/ios/Classes/LoginViewController.h | 4 - clients/ios/Classes/LoginViewController.m | 3 - clients/ios/Classes/MenuViewController.h | 3 +- clients/ios/Classes/MoveSiteViewController.h | 7 +- clients/ios/Classes/MoveSiteViewController.m | 6 -- clients/ios/Classes/NewsBlurAppDelegate.m | 6 +- .../ios/Classes/NotificationsViewController.h | 2 - .../ios/Classes/NotificationsViewController.m | 6 -- .../ios/Classes/OriginalStoryViewController.h | 4 - .../ios/Classes/OriginalStoryViewController.m | 3 - clients/ios/Classes/PremiumViewController.h | 1 - clients/ios/Classes/PremiumViewController.m | 2 - clients/ios/Classes/SceneDelegate.swift | 17 ++++ clients/ios/Classes/ShareViewController.h | 1 - clients/ios/Classes/ShareViewController.m | 5 +- clients/ios/Classes/SplitViewController.swift | 6 ++ .../Classes/StoryDetailObjCViewController.h | 5 - .../Classes/StoryDetailObjCViewController.m | 10 +- .../Classes/StoryPagesObjCViewController.h | 2 - .../Classes/StoryPagesObjCViewController.m | 12 ++- clients/ios/Classes/ToolbarDelegate.swift | 71 ++++++++++++++ clients/ios/Classes/TrainerViewController.h | 3 - clients/ios/Classes/TrainerViewController.m | 3 - .../ios/Classes/UserProfileViewController.h | 4 - .../ios/Classes/UserProfileViewController.m | 6 +- .../ios/NewsBlur.xcodeproj/project.pbxproj | 6 ++ .../ios/Resources/MainInterface.storyboard | 2 +- 46 files changed, 258 insertions(+), 221 deletions(-) create mode 100644 clients/ios/Classes/ToolbarDelegate.swift diff --git a/clients/ios/Classes/AddSiteViewController.h b/clients/ios/Classes/AddSiteViewController.h index 5023bf4b98..2e7c3d24a9 100644 --- a/clients/ios/Classes/AddSiteViewController.h +++ b/clients/ios/Classes/AddSiteViewController.h @@ -7,15 +7,10 @@ // #import -#import "NewsBlurAppDelegate.h" #import "NewsBlur-Swift.h" -@class NewsBlurAppDelegate; - @interface AddSiteViewController : BaseViewController - { - NewsBlurAppDelegate *appDelegate; -} + - (void)reload; - (IBAction)addSite; diff --git a/clients/ios/Classes/AddSiteViewController.m b/clients/ios/Classes/AddSiteViewController.m index deb96de737..7ffb83bf8b 100644 --- a/clients/ios/Classes/AddSiteViewController.m +++ b/clients/ios/Classes/AddSiteViewController.m @@ -8,7 +8,6 @@ #import "AddSiteViewController.h" #import "AddSiteAutocompleteCell.h" -#import "NewsBlurAppDelegate.h" #import "MenuViewController.h" #import "SBJson4.h" #import "NewsBlur-Swift.h" diff --git a/clients/ios/Classes/BaseViewController.h b/clients/ios/Classes/BaseViewController.h index 960576a102..717b2f91d6 100644 --- 
a/clients/ios/Classes/BaseViewController.h +++ b/clients/ios/Classes/BaseViewController.h @@ -1,7 +1,13 @@ #import #import "MBProgressHUD.h" -@interface BaseViewController : UIViewController +@class NewsBlurAppDelegate; + +@interface BaseViewController : UIViewController { + NewsBlurAppDelegate *appDelegate; +} + +@property (nonatomic) IBOutlet NewsBlurAppDelegate *appDelegate; @property (nonatomic, readonly) BOOL isPhone; @property (nonatomic, readonly) BOOL isMac; @@ -30,5 +36,19 @@ - (void)collectionView:(UICollectionView *)collectionView selectItemAtIndexPath:(NSIndexPath *)indexPath animated:(BOOL)animated scrollPosition:(UICollectionViewScrollPosition)scrollPosition; - (void)collectionView:(UICollectionView *)collectionView deselectItemAtIndexPath:(NSIndexPath *)indexPath animated:(BOOL)animated; +- (IBAction)reloadFeeds:(id)sender; +- (IBAction)showMuteSites:(id)sender; +- (IBAction)showOrganizeSites:(id)sender; +- (IBAction)showWidgetSites:(id)sender; +- (IBAction)showNotifications:(id)sender; +- (IBAction)showFindFriends:(id)sender; +- (IBAction)showPremium:(id)sender; +- (IBAction)showSupportForum:(id)sender; +- (IBAction)showLogout:(id)sender; +- (IBAction)chooseColumns:(id)sender; +- (IBAction)chooseFontSize:(id)sender; +- (IBAction)chooseSpacing:(id)sender; +- (IBAction)chooseTheme:(id)sender; + @end diff --git a/clients/ios/Classes/BaseViewController.m b/clients/ios/Classes/BaseViewController.m index 999aba944a..91e872d34c 100644 --- a/clients/ios/Classes/BaseViewController.m +++ b/clients/ios/Classes/BaseViewController.m @@ -4,17 +4,33 @@ @implementation BaseViewController +@synthesize appDelegate; + #pragma mark - #pragma mark HTTP requests - (instancetype)init { if (self = [super init]) { - + self.appDelegate = [NewsBlurAppDelegate sharedAppDelegate]; } return self; } +- (void)awakeFromNib { + [super awakeFromNib]; + + self.appDelegate = [NewsBlurAppDelegate sharedAppDelegate]; +} + +- (BOOL)becomeFirstResponder { + BOOL success = [super becomeFirstResponder]; + + NSLog(@"%@ becomeFirstResponder: %@", self, success ? 
@"yes" : @"no"); // log + + return success; +} + #pragma mark - #pragma mark View methods @@ -207,4 +223,80 @@ - (BOOL)isCompactWidth { //return self.compactWidth > 0.0; } +- (IBAction)reloadFeeds:(id)sender { + [appDelegate reloadFeedsView:NO]; +} + +- (IBAction)showMuteSites:(id)sender { + [self.appDelegate showMuteSites]; +} + +- (IBAction)showOrganizeSites:(id)sender { + [self.appDelegate showOrganizeSites]; +} + +- (IBAction)showWidgetSites:(id)sender { + [self.appDelegate showWidgetSites]; +} + +- (IBAction)showNotifications:(id)sender { + [self.appDelegate openNotificationsWithFeed:nil]; +} + +- (IBAction)showFindFriends:(id)sender { + [self.appDelegate showFindFriends]; +} + +- (IBAction)showPremium:(id)sender { + [self.appDelegate showPremiumDialog]; +} + +- (IBAction)showSupportForum:(id)sender { + NSURL *url = [NSURL URLWithString:@"https://forum.newsblur.com"]; + [[UIApplication sharedApplication] openURL:url options:@{} completionHandler:nil]; +} + +- (IBAction)showLogout:(id)sender { + [self.appDelegate confirmLogout]; +} + +- (IBAction)chooseColumns:(id)sender { + UICommand *command = sender; + NSString *string = command.propertyList; + + [[NSUserDefaults standardUserDefaults] setObject:string forKey:@"split_behavior"]; + + [UIView animateWithDuration:0.5 animations:^{ + [self.appDelegate updateSplitBehavior:YES]; + }]; + + [self.appDelegate.detailViewController updateLayoutWithReload:NO fetchFeeds:YES]; +} + +- (IBAction)chooseFontSize:(id)sender { + UICommand *command = sender; + NSString *string = command.propertyList; + + [[NSUserDefaults standardUserDefaults] setObject:string forKey:@"feed_list_font_size"]; + + [self.appDelegate resizeFontSize]; +} + +- (IBAction)chooseSpacing:(id)sender { + UICommand *command = sender; + NSString *string = command.propertyList; + + [[NSUserDefaults standardUserDefaults] setObject:string forKey:@"feed_list_spacing"]; + + [self.appDelegate.feedsViewController reloadFeedTitlesTable]; + [self.appDelegate.feedDetailViewController reloadWithSizing]; +} + +- (IBAction)chooseTheme:(id)sender { + UICommand *command = sender; + NSString *string = command.propertyList; + + [ThemeManager themeManager].theme = string; +} + @end diff --git a/clients/ios/Classes/DetailViewController.swift b/clients/ios/Classes/DetailViewController.swift index 78c20f6ede..507971bf32 100644 --- a/clients/ios/Classes/DetailViewController.swift +++ b/clients/ios/Classes/DetailViewController.swift @@ -10,11 +10,6 @@ import UIKit /// Manages the detail column of the split view, with the feed detail and/or the story pages. class DetailViewController: BaseViewController { - /// Returns the shared app delegate. - var appDelegate: NewsBlurAppDelegate { - return NewsBlurAppDelegate.shared() - } - /// Preference keys. enum Key { /// Style of the feed detail list layout. 
diff --git a/clients/ios/Classes/FeedChooserViewController.h b/clients/ios/Classes/FeedChooserViewController.h index 6c94bf1a42..71d6abb7fb 100644 --- a/clients/ios/Classes/FeedChooserViewController.h +++ b/clients/ios/Classes/FeedChooserViewController.h @@ -18,9 +18,7 @@ typedef NS_ENUM(NSUInteger, FeedChooserOperation) }; -@interface FeedChooserViewController : BaseViewController { - NewsBlurAppDelegate *appDelegate; -} +@interface FeedChooserViewController : BaseViewController @property (weak) IBOutlet UITableView *tableView; diff --git a/clients/ios/Classes/FeedChooserViewController.m b/clients/ios/Classes/FeedChooserViewController.m index 21ac5443e4..8949874741 100644 --- a/clients/ios/Classes/FeedChooserViewController.m +++ b/clients/ios/Classes/FeedChooserViewController.m @@ -30,7 +30,6 @@ @interface FeedChooserViewController () @property (nonatomic) FeedChooserSort sort; @property (nonatomic) BOOL ascending; @property (nonatomic) BOOL flat; -@property (nonatomic, readonly) NewsBlurAppDelegate *appDelegate; @property (nonatomic, strong) NSUserDefaults *groupDefaults; @property (nonatomic, readonly) NSArray *widgetFeeds; @@ -45,8 +44,6 @@ - (void)dealloc { - (void)viewDidLoad { [super viewDidLoad]; - appDelegate = [NewsBlurAppDelegate sharedAppDelegate]; - if (self.operation == FeedChooserOperationWidgetSites) { self.groupDefaults = [[NSUserDefaults alloc] initWithSuiteName:@"group.com.newsblur.NewsBlur-Group"]; } diff --git a/clients/ios/Classes/FeedDetailObjCViewController.h b/clients/ios/Classes/FeedDetailObjCViewController.h index d473053c87..ce7037dbf0 100644 --- a/clients/ios/Classes/FeedDetailObjCViewController.h +++ b/clients/ios/Classes/FeedDetailObjCViewController.h @@ -14,7 +14,6 @@ #import "MCSwipeTableViewCell.h" #import "FeedDetailTableCell.h" -@class NewsBlurAppDelegate; @class MCSwipeTableViewCell; @interface FeedDetailObjCViewController : BaseViewController @@ -23,8 +22,6 @@ MCSwipeTableViewCellDelegate, UIGestureRecognizerDelegate, UISearchBarDelegate, UITableViewDragDelegate> { - NewsBlurAppDelegate *appDelegate; - BOOL pageFetching; BOOL pageFinished; BOOL finishedAnimatingIn; @@ -39,7 +36,6 @@ NBNotifier *notifier; } -@property (nonatomic) IBOutlet NewsBlurAppDelegate *appDelegate; @property (nonatomic, strong) IBOutlet UITableView *storyTitlesTable; @property (nonatomic) IBOutlet UIBarButtonItem * feedMarkReadButton; @property (nonatomic) IBOutlet UIBarButtonItem * feedsBarButton; diff --git a/clients/ios/Classes/FeedDetailObjCViewController.m b/clients/ios/Classes/FeedDetailObjCViewController.m index cf70d2e5ed..cbbf4dfdf8 100644 --- a/clients/ios/Classes/FeedDetailObjCViewController.m +++ b/clients/ios/Classes/FeedDetailObjCViewController.m @@ -69,7 +69,6 @@ @implementation FeedDetailObjCViewController @synthesize separatorBarButton; @synthesize titleImageBarButton; @synthesize spacerBarButton, spacer2BarButton; -@synthesize appDelegate; @synthesize pageFetching; @synthesize pageFinished; @synthesize finishedAnimatingIn; @@ -92,8 +91,6 @@ - (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil - (void)viewDidLoad { [super viewDidLoad]; - self.appDelegate = [NewsBlurAppDelegate sharedAppDelegate]; - [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(preferredContentSizeChanged:) name:UIContentSizeCategoryDidChangeNotification @@ -415,6 +412,11 @@ - (void)viewWillTransitionToSize:(CGSize)size withTransitionCoordinator:(id { - NewsBlurAppDelegate *appDelegate; - NSMutableDictionary * activeFeedLocations; 
NSMutableDictionary *stillVisibleFeeds; NSMutableDictionary *visibleFolders; @@ -53,7 +49,6 @@ UIGestureRecognizerDelegate, UISearchBarDelegate> { NBNotifier *notifier; } -@property (nonatomic) IBOutlet NewsBlurAppDelegate *appDelegate; @property (nonatomic) IBOutlet UIView *innerView; @property (nonatomic) IBOutlet UITableView *feedTitlesTable; @property (nonatomic) IBOutlet NSLayoutConstraint *feedTitlesTopConstraint; @@ -103,20 +98,7 @@ UIGestureRecognizerDelegate, UISearchBarDelegate> { - (void)didSelectSectionHeaderWithTag:(NSInteger)tag; - (void)selectNextFolderOrFeed; -- (IBAction)reloadFeeds:(id)sender; - (IBAction)selectIntelligence; -- (IBAction)showMuteSites:(id)sender; -- (IBAction)showOrganizeSites:(id)sender; -- (IBAction)showWidgetSites:(id)sender; -- (IBAction)showNotifications:(id)sender; -- (IBAction)showFindFriends:(id)sender; -- (IBAction)showPremium:(id)sender; -- (IBAction)showSupportForum:(id)sender; -- (IBAction)showLogout:(id)sender; -- (IBAction)chooseColumns:(id)sender; -- (IBAction)chooseFontSize:(id)sender; -- (IBAction)chooseSpacing:(id)sender; -- (IBAction)chooseTheme:(id)sender; - (void)markFeedRead:(NSString *)feedId cutoffDays:(NSInteger)days; - (void)markFeedsRead:(NSArray *)feedIds cutoffDays:(NSInteger)days; diff --git a/clients/ios/Classes/FeedsObjCViewController.m b/clients/ios/Classes/FeedsObjCViewController.m index 86dc576c35..e20daec97e 100644 --- a/clients/ios/Classes/FeedsObjCViewController.m +++ b/clients/ios/Classes/FeedsObjCViewController.m @@ -57,7 +57,6 @@ @interface FeedsObjCViewController () @implementation FeedsObjCViewController -@synthesize appDelegate; @synthesize feedTitlesTable; @synthesize feedViewToolbar; @synthesize feedScoreSlider; @@ -254,6 +253,9 @@ - (void)viewWillAppear:(BOOL)animated { UITitlebar *titlebar = navController.navigationBar.window.windowScene.titlebar; titlebar.titleVisibility = UITitlebarTitleVisibilityHidden; + + [self.navigationController setNavigationBarHidden:YES animated:animated]; + [self.navigationController setToolbarHidden:YES animated:animated]; #endif NSUserDefaults *userPreferences = [NSUserDefaults standardUserDefaults]; @@ -2731,10 +2733,6 @@ - (void)refresh:(UIRefreshControl *)refreshControl { } #endif -- (IBAction)reloadFeeds:(id)sender { - [appDelegate reloadFeedsView:NO]; -} - - (void)finishRefresh { self.inPullToRefresh_ = NO; #if !TARGET_OS_MACCATALYST @@ -2869,78 +2867,6 @@ - (void)finishRefreshingFeedList:(NSDictionary *)results feedId:(NSString *)feed // return YES; //} -- (IBAction)showMuteSites:(id)sender { - [self.appDelegate showMuteSites]; -} - -- (IBAction)showOrganizeSites:(id)sender { - [self.appDelegate showOrganizeSites]; -} - -- (IBAction)showWidgetSites:(id)sender { - [self.appDelegate showWidgetSites]; -} - -- (IBAction)showNotifications:(id)sender { - [self.appDelegate openNotificationsWithFeed:nil]; -} - -- (IBAction)showFindFriends:(id)sender { - [self.appDelegate showFindFriends]; -} - -- (IBAction)showPremium:(id)sender { - [self.appDelegate showPremiumDialog]; -} - -- (IBAction)showSupportForum:(id)sender { - NSURL *url = [NSURL URLWithString:@"https://forum.newsblur.com"]; - [[UIApplication sharedApplication] openURL:url options:@{} completionHandler:nil]; -} - -- (IBAction)showLogout:(id)sender { - [self.appDelegate confirmLogout]; -} - -- (IBAction)chooseColumns:(id)sender { - UICommand *command = sender; - NSString *string = command.propertyList; - - [[NSUserDefaults standardUserDefaults] setObject:string forKey:@"split_behavior"]; - - [UIView animateWithDuration:0.5 
animations:^{ - [self.appDelegate updateSplitBehavior:YES]; - }]; - - [self.appDelegate.detailViewController updateLayoutWithReload:NO fetchFeeds:YES]; -} - -- (IBAction)chooseFontSize:(id)sender { - UICommand *command = sender; - NSString *string = command.propertyList; - - [[NSUserDefaults standardUserDefaults] setObject:string forKey:@"feed_list_font_size"]; - - [self.appDelegate resizeFontSize]; -} - -- (IBAction)chooseSpacing:(id)sender { - UICommand *command = sender; - NSString *string = command.propertyList; - - [[NSUserDefaults standardUserDefaults] setObject:string forKey:@"feed_list_spacing"]; - - [self reloadFeedTitlesTable]; - [self.appDelegate.feedDetailViewController reloadWithSizing]; -} - -- (IBAction)chooseTheme:(id)sender { - UICommand *command = sender; - NSString *string = command.propertyList; - - [ThemeManager themeManager].theme = string; -} - - (void)resetToolbar { // self.navigationItem.leftBarButtonItem = nil; self.navigationItem.titleView = nil; diff --git a/clients/ios/Classes/FirstTimeUserAddFriendsViewController.h b/clients/ios/Classes/FirstTimeUserAddFriendsViewController.h index af9a037262..6f67eca198 100644 --- a/clients/ios/Classes/FirstTimeUserAddFriendsViewController.h +++ b/clients/ios/Classes/FirstTimeUserAddFriendsViewController.h @@ -11,11 +11,8 @@ #import "NewsBlurAppDelegate.h" #import "NewsBlur-Swift.h" -@interface FirstTimeUserAddFriendsViewController : BaseViewController { - NewsBlurAppDelegate *appDelegate; -} +@interface FirstTimeUserAddFriendsViewController : BaseViewController -@property (nonatomic) IBOutlet NewsBlurAppDelegate *appDelegate; @property (nonatomic) IBOutlet UIBarButtonItem *nextButton; @property (weak, nonatomic) IBOutlet UIButton *facebookButton; @property (weak, nonatomic) IBOutlet UIButton *twitterButton; diff --git a/clients/ios/Classes/FirstTimeUserAddFriendsViewController.m b/clients/ios/Classes/FirstTimeUserAddFriendsViewController.m index 737299d4ce..63c44141ec 100644 --- a/clients/ios/Classes/FirstTimeUserAddFriendsViewController.m +++ b/clients/ios/Classes/FirstTimeUserAddFriendsViewController.m @@ -16,7 +16,6 @@ @interface FirstTimeUserAddFriendsViewController () @implementation FirstTimeUserAddFriendsViewController -@synthesize appDelegate; @synthesize nextButton; @synthesize facebookButton; @synthesize twitterButton; @@ -36,8 +35,6 @@ - (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil - (void)viewDidLoad { [super viewDidLoad]; - self.appDelegate = [NewsBlurAppDelegate sharedAppDelegate]; - UIBarButtonItem *next = [[UIBarButtonItem alloc] initWithTitle:@"Skip this step" style:UIBarButtonItemStyleDone target:self action:@selector(tapNextButton)]; self.nextButton = next; self.navigationItem.rightBarButtonItem = next; diff --git a/clients/ios/Classes/FirstTimeUserAddNewsBlurViewController.h b/clients/ios/Classes/FirstTimeUserAddNewsBlurViewController.h index 8643876558..e7b09d347c 100644 --- a/clients/ios/Classes/FirstTimeUserAddNewsBlurViewController.h +++ b/clients/ios/Classes/FirstTimeUserAddNewsBlurViewController.h @@ -9,11 +9,8 @@ #import #import "NewsBlurAppDelegate.h" -@interface FirstTimeUserAddNewsBlurViewController : BaseViewController { - NewsBlurAppDelegate *appDelegate; -} +@interface FirstTimeUserAddNewsBlurViewController : BaseViewController -@property (nonatomic) IBOutlet NewsBlurAppDelegate *appDelegate; @property (nonatomic) IBOutlet UIBarButtonItem *nextButton; @property (strong, nonatomic) IBOutlet UILabel *instructionsLabel; diff --git 
a/clients/ios/Classes/FirstTimeUserAddNewsBlurViewController.m b/clients/ios/Classes/FirstTimeUserAddNewsBlurViewController.m index b2922cc699..0e7be044ee 100644 --- a/clients/ios/Classes/FirstTimeUserAddNewsBlurViewController.m +++ b/clients/ios/Classes/FirstTimeUserAddNewsBlurViewController.m @@ -11,7 +11,6 @@ @implementation FirstTimeUserAddNewsBlurViewController -@synthesize appDelegate; @synthesize nextButton; @synthesize instructionsLabel; @@ -27,8 +26,6 @@ - (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil - (void)viewDidLoad { [super viewDidLoad]; - self.appDelegate = [NewsBlurAppDelegate sharedAppDelegate]; - UIBarButtonItem *next = [[UIBarButtonItem alloc] initWithTitle:@"Start reading" style:UIBarButtonItemStyleDone target:self action:@selector(tapNextButton)]; self.nextButton = next; self.navigationItem.rightBarButtonItem = next; diff --git a/clients/ios/Classes/FirstTimeUserAddSitesViewController.h b/clients/ios/Classes/FirstTimeUserAddSitesViewController.h index ef936b3b94..be255d8269 100644 --- a/clients/ios/Classes/FirstTimeUserAddSitesViewController.h +++ b/clients/ios/Classes/FirstTimeUserAddSitesViewController.h @@ -10,11 +10,8 @@ #import "NewsBlurAppDelegate.h" @interface FirstTimeUserAddSitesViewController : BaseViewController - { - NewsBlurAppDelegate *appDelegate; -} + -@property (nonatomic) IBOutlet NewsBlurAppDelegate *appDelegate; @property (nonatomic) IBOutlet UIButton *googleReaderButton; @property (nonatomic) IBOutlet UIView *googleReaderButtonWrapper; @property (nonatomic) IBOutlet UIBarButtonItem *nextButton; diff --git a/clients/ios/Classes/FirstTimeUserAddSitesViewController.m b/clients/ios/Classes/FirstTimeUserAddSitesViewController.m index 99b06240ee..b449c72c44 100644 --- a/clients/ios/Classes/FirstTimeUserAddSitesViewController.m +++ b/clients/ios/Classes/FirstTimeUserAddSitesViewController.m @@ -24,7 +24,6 @@ @interface FirstTimeUserAddSitesViewController() @implementation FirstTimeUserAddSitesViewController -@synthesize appDelegate; @synthesize googleReaderButton; @synthesize nextButton; @synthesize activityIndicator; @@ -50,8 +49,6 @@ - (void)viewDidLoad { [super viewDidLoad]; - self.appDelegate = [NewsBlurAppDelegate sharedAppDelegate]; - UIBarButtonItem *next = [[UIBarButtonItem alloc] initWithTitle:@"Next step" style:UIBarButtonItemStyleDone target:self action:@selector(tapNextButton)]; self.nextButton = next; self.nextButton.enabled = YES; diff --git a/clients/ios/Classes/FriendsListViewController.h b/clients/ios/Classes/FriendsListViewController.h index 1c335e7100..f46ac98918 100644 --- a/clients/ios/Classes/FriendsListViewController.h +++ b/clients/ios/Classes/FriendsListViewController.h @@ -13,7 +13,6 @@ @class NewsBlurAppDelegate; @interface FriendsListViewController : BaseViewController { - NewsBlurAppDelegate *appDelegate; UISearchBar *friendSearchBar; UITableView *friendsTable; NSArray *suggestedUserProfiles; @@ -21,7 +20,6 @@ NSArray *userProfileIds; } -@property (nonatomic) IBOutlet NewsBlurAppDelegate *appDelegate; @property (nonatomic) IBOutlet UISearchBar *friendSearchBar; @property (nonatomic) IBOutlet UITableView *friendsTable; diff --git a/clients/ios/Classes/FriendsListViewController.m b/clients/ios/Classes/FriendsListViewController.m index 641325bc06..a98bd000f2 100644 --- a/clients/ios/Classes/FriendsListViewController.m +++ b/clients/ios/Classes/FriendsListViewController.m @@ -27,7 +27,6 @@ @interface FriendsListViewController() @implementation FriendsListViewController -@synthesize appDelegate; 
@synthesize friendSearchBar; @synthesize friendsTable; @synthesize suggestedUserProfiles; @@ -45,8 +44,6 @@ - (void)viewDidLoad { [super viewDidLoad]; - self.appDelegate = [NewsBlurAppDelegate sharedAppDelegate]; - self.navigationItem.title = @"Find Friends"; UIBarButtonItem *cancelButton = [[UIBarButtonItem alloc] initWithTitle: @"Done" style: UIBarButtonItemStylePlain diff --git a/clients/ios/Classes/LoginViewController.h b/clients/ios/Classes/LoginViewController.h index 3094f7563d..c19a52b628 100644 --- a/clients/ios/Classes/LoginViewController.h +++ b/clients/ios/Classes/LoginViewController.h @@ -12,8 +12,6 @@ #define LANDSCAPE_MARGIN 128 @interface LoginViewController : BaseViewController { - NewsBlurAppDelegate *appDelegate; - BOOL isOnSignUpScreen; UITextField *usernameInput; UITextField *passwordInput; @@ -46,8 +44,6 @@ - (void)animateLoop; -@property (nonatomic) IBOutlet NewsBlurAppDelegate *appDelegate; - @property (nonatomic) IBOutlet UITextField *usernameInput; @property (nonatomic) IBOutlet UITextField *passwordInput; @property (nonatomic) IBOutlet UITextField *emailInput; diff --git a/clients/ios/Classes/LoginViewController.m b/clients/ios/Classes/LoginViewController.m index 9c44685f9d..b9e0d56ede 100644 --- a/clients/ios/Classes/LoginViewController.m +++ b/clients/ios/Classes/LoginViewController.m @@ -12,7 +12,6 @@ @implementation LoginViewController -@synthesize appDelegate; @synthesize usernameInput; @synthesize passwordInput; @synthesize emailInput; @@ -44,8 +43,6 @@ - (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil } - (void)viewDidLoad { - self.appDelegate = NewsBlurAppDelegate.sharedAppDelegate; - self.usernameInput.borderStyle = UITextBorderStyleRoundedRect; self.passwordInput.borderStyle = UITextBorderStyleRoundedRect; self.emailInput.borderStyle = UITextBorderStyleRoundedRect; diff --git a/clients/ios/Classes/MenuViewController.h b/clients/ios/Classes/MenuViewController.h index 0d323e355a..11728088e1 100644 --- a/clients/ios/Classes/MenuViewController.h +++ b/clients/ios/Classes/MenuViewController.h @@ -7,11 +7,12 @@ // #import +#import "BaseViewController.h" typedef void (^MenuItemHandler)(void); typedef void (^MenuItemSegmentedHandler)(NSUInteger selectedIndex); -@interface MenuViewController : UIViewController +@interface MenuViewController : BaseViewController @property (weak) IBOutlet UITableView *menuTableView; diff --git a/clients/ios/Classes/MoveSiteViewController.h b/clients/ios/Classes/MoveSiteViewController.h index f8fb7e6be3..d2d5460a05 100644 --- a/clients/ios/Classes/MoveSiteViewController.h +++ b/clients/ios/Classes/MoveSiteViewController.h @@ -9,16 +9,12 @@ #import #import "NewsBlurAppDelegate.h" -@class NewsBlurAppDelegate; - @interface FolderTextField : UITextField @end @interface MoveSiteViewController : BaseViewController - { - NewsBlurAppDelegate *appDelegate; -} + - (void)reload; - (IBAction)moveSite; @@ -27,7 +23,6 @@ - (IBAction)doMoveButton; - (NSArray *)pickerFolders; -@property (nonatomic) IBOutlet NewsBlurAppDelegate *appDelegate; @property (nonatomic) IBOutlet UITextField *fromFolderInput; @property (nonatomic) IBOutlet FolderTextField *toFolderInput; @property (nonatomic) IBOutlet UILabel *titleLabel; diff --git a/clients/ios/Classes/MoveSiteViewController.m b/clients/ios/Classes/MoveSiteViewController.m index d4eed46a66..eac5548437 100644 --- a/clients/ios/Classes/MoveSiteViewController.m +++ b/clients/ios/Classes/MoveSiteViewController.m @@ -7,13 +7,11 @@ // #import "MoveSiteViewController.h" -#import 
"NewsBlurAppDelegate.h" #import "StringHelper.h" #import "StoriesCollection.h" @implementation MoveSiteViewController -@synthesize appDelegate; @synthesize toFolderInput; @synthesize fromFolderInput; @synthesize titleLabel; @@ -34,8 +32,6 @@ - (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil } - (void)viewDidLoad { - self.appDelegate = [NewsBlurAppDelegate sharedAppDelegate]; - UIImageView *folderImage = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"folder-open"]]; folderImage.frame = CGRectMake(0, 0, 24, 16); [folderImage setContentMode:UIViewContentModeRight]; @@ -54,8 +50,6 @@ - (void)viewDidLoad { frame.size.height += 20; self.navBar.frame = frame; - appDelegate = [NewsBlurAppDelegate sharedAppDelegate]; - [super viewDidLoad]; } diff --git a/clients/ios/Classes/NewsBlurAppDelegate.m b/clients/ios/Classes/NewsBlurAppDelegate.m index 9c4bb31150..d0a120e070 100644 --- a/clients/ios/Classes/NewsBlurAppDelegate.m +++ b/clients/ios/Classes/NewsBlurAppDelegate.m @@ -1173,6 +1173,7 @@ - (void)resetShareComments { #pragma mark View Management - (void)prepareViewControllers { + self.appDelegate = self; self.splitViewController = (SplitViewController *)self.window.rootViewController; NSArray *splitChildren = self.splitViewController.viewControllers; @@ -2398,8 +2399,11 @@ - (void)showSafariViewControllerWithURL:(NSURL *)url useReader:(BOOL)useReader { self.safariViewController = [[SFSafariViewController alloc] initWithURL:url configuration:config]; self.safariViewController.delegate = self; +#if TARGET_OS_MACCATALYST +#else [self.storyPagesViewController setNavigationBarHidden:NO]; [feedsNavigationController presentViewController:self.safariViewController animated:YES completion:nil]; +#endif } - (BOOL)showingSafariViewController { @@ -3350,7 +3354,7 @@ - (void)showPopoverWithViewController:(UIViewController *)viewController content [self.navigationControllerForPopover presentViewController:viewController animated:YES completion:^{ popoverPresentationController.passthroughViews = nil; // NSLog(@"%@ canBecomeFirstResponder? 
%d", viewController, viewController.canBecomeFirstResponder); - [viewController becomeFirstResponder]; +// [viewController becomeFirstResponder]; }]; } diff --git a/clients/ios/Classes/NotificationsViewController.h b/clients/ios/Classes/NotificationsViewController.h index 4a33d991cf..b0f59c8ea4 100644 --- a/clients/ios/Classes/NotificationsViewController.h +++ b/clients/ios/Classes/NotificationsViewController.h @@ -13,11 +13,9 @@ @class NewsBlurAppDelegate; @interface NotificationsViewController : BaseViewController { - NewsBlurAppDelegate *appDelegate; NSArray *notificationFeedIds; } -@property (nonatomic) IBOutlet NewsBlurAppDelegate *appDelegate; @property (nonatomic) IBOutlet UITableView *notificationsTable; @property (nonatomic) NSString *feedId; diff --git a/clients/ios/Classes/NotificationsViewController.m b/clients/ios/Classes/NotificationsViewController.m index 409f1be211..eb041e3402 100644 --- a/clients/ios/Classes/NotificationsViewController.m +++ b/clients/ios/Classes/NotificationsViewController.m @@ -16,14 +16,11 @@ @interface NotificationsViewController () @implementation NotificationsViewController @synthesize notificationsTable; -@synthesize appDelegate; @synthesize feedId; - (void)viewDidLoad { [super viewDidLoad]; - self.appDelegate = [NewsBlurAppDelegate sharedAppDelegate]; - self.navigationItem.title = @"Notifications"; UIBarButtonItem *cancelButton = [[UIBarButtonItem alloc] initWithTitle: @"Done" style: UIBarButtonItemStylePlain @@ -31,9 +28,6 @@ - (void)viewDidLoad { action: @selector(doCancelButton)]; [self.navigationItem setRightBarButtonItem:cancelButton]; - // Do any additional setup after loading the view from its nib. - self.appDelegate = (NewsBlurAppDelegate *)[[UIApplication sharedApplication] delegate]; - notificationsTable = [[UITableView alloc] init]; notificationsTable.delegate = self; notificationsTable.dataSource = self; diff --git a/clients/ios/Classes/OriginalStoryViewController.h b/clients/ios/Classes/OriginalStoryViewController.h index 4798472a6c..214de52cd8 100644 --- a/clients/ios/Classes/OriginalStoryViewController.h +++ b/clients/ios/Classes/OriginalStoryViewController.h @@ -10,13 +10,10 @@ #import "BaseViewController.h" #import -@class NewsBlurAppDelegate; - @interface OriginalStoryViewController : BaseViewController { - NewsBlurAppDelegate *appDelegate; NSString *activeUrl; NSMutableArray *visitedUrls; WKWebView *webView; @@ -27,7 +24,6 @@ UIGestureRecognizerDelegate> { BOOL finishedLoading; } -@property (nonatomic) IBOutlet NewsBlurAppDelegate *appDelegate; @property (nonatomic) IBOutlet WKWebView *webView; //@property (strong, nonatomic) SloppySwiper *swiper; @property (nonatomic) UIProgressView *progressView; diff --git a/clients/ios/Classes/OriginalStoryViewController.m b/clients/ios/Classes/OriginalStoryViewController.m index 6a3879ea2c..cc06ee89d1 100644 --- a/clients/ios/Classes/OriginalStoryViewController.m +++ b/clients/ios/Classes/OriginalStoryViewController.m @@ -17,7 +17,6 @@ @implementation OriginalStoryViewController -@synthesize appDelegate; @synthesize webView; //@synthesize swiper; @synthesize progressView; @@ -25,8 +24,6 @@ @implementation OriginalStoryViewController - (void)viewDidLoad { [super viewDidLoad]; - self.appDelegate = [NewsBlurAppDelegate sharedAppDelegate]; - self.view.layer.masksToBounds = NO; self.view.layer.shadowRadius = 5; self.view.layer.shadowOpacity = 0.5; diff --git a/clients/ios/Classes/PremiumViewController.h b/clients/ios/Classes/PremiumViewController.h index cce68780ee..9f1a8dede2 100644 --- 
a/clients/ios/Classes/PremiumViewController.h +++ b/clients/ios/Classes/PremiumViewController.h @@ -14,7 +14,6 @@ @interface PremiumViewController : BaseViewController -@property (nonatomic) IBOutlet NewsBlurAppDelegate *appDelegate; @property (nonatomic) IBOutlet UITableView *premiumTable; diff --git a/clients/ios/Classes/PremiumViewController.m b/clients/ios/Classes/PremiumViewController.m index 0aa2db547e..2a821e5c93 100644 --- a/clients/ios/Classes/PremiumViewController.m +++ b/clients/ios/Classes/PremiumViewController.m @@ -24,8 +24,6 @@ @implementation PremiumViewController - (void)viewDidLoad { [super viewDidLoad]; - self.appDelegate = [NewsBlurAppDelegate sharedAppDelegate]; - UIBarButtonItem *cancelButton = [[UIBarButtonItem alloc] initWithTitle: @"Done" style: UIBarButtonItemStylePlain target: self diff --git a/clients/ios/Classes/SceneDelegate.swift b/clients/ios/Classes/SceneDelegate.swift index 0743d599c6..037b98cb68 100644 --- a/clients/ios/Classes/SceneDelegate.swift +++ b/clients/ios/Classes/SceneDelegate.swift @@ -12,9 +12,26 @@ class SceneDelegate: UIResponder, UIWindowSceneDelegate { let appDelegate: NewsBlurAppDelegate = .shared var window: UIWindow? +#if targetEnvironment(macCatalyst) + var toolbar = NSToolbar(identifier: "main") + var toolbarDelegate = ToolbarDelegate() +#endif func scene(_ scene: UIScene, willConnectTo session: UISceneSession, options connectionOptions: UIScene.ConnectionOptions) { appDelegate.window = window + +#if targetEnvironment(macCatalyst) + guard let windowScene = scene as? UIWindowScene, let titlebar = windowScene.titlebar else { + return + } + + toolbar.delegate = toolbarDelegate + toolbar.displayMode = .iconOnly + + titlebar.toolbar = toolbar + titlebar.toolbarStyle = .automatic + +#endif appDelegate.prepareViewControllers() } } diff --git a/clients/ios/Classes/ShareViewController.h b/clients/ios/Classes/ShareViewController.h index 38df0a61a0..7d4cf67d6e 100644 --- a/clients/ios/Classes/ShareViewController.h +++ b/clients/ios/Classes/ShareViewController.h @@ -15,7 +15,6 @@ } @property (nonatomic) IBOutlet UITextView *commentField; -@property (nonatomic) IBOutlet NewsBlurAppDelegate *appDelegate; @property (nonatomic) IBOutlet UIButton *facebookButton; @property (nonatomic) IBOutlet UIButton *twitterButton; @property (nonatomic) IBOutlet UIBarButtonItem *submitButton; diff --git a/clients/ios/Classes/ShareViewController.m b/clients/ios/Classes/ShareViewController.m index c6027a0062..6661edc770 100644 --- a/clients/ios/Classes/ShareViewController.m +++ b/clients/ios/Classes/ShareViewController.m @@ -21,7 +21,6 @@ @implementation ShareViewController @synthesize twitterButton; @synthesize submitButton; @synthesize commentField; -@synthesize appDelegate; @synthesize activeReplyId; @synthesize activeCommentId; @synthesize activeStoryId; @@ -38,9 +37,7 @@ - (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil } - (void)viewDidLoad { - self.appDelegate = [NewsBlurAppDelegate sharedAppDelegate]; - - [[NSNotificationCenter defaultCenter] + [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(onTextChange:) name:UITextViewTextDidChangeNotification diff --git a/clients/ios/Classes/SplitViewController.swift b/clients/ios/Classes/SplitViewController.swift index 096496ffe6..6cb8037405 100644 --- a/clients/ios/Classes/SplitViewController.swift +++ b/clients/ios/Classes/SplitViewController.swift @@ -24,4 +24,10 @@ class SplitViewController: UISplitViewController { override var childForStatusBarStyle: 
UIViewController? { return nil } + + // Can do menu validation here. +// override func canPerformAction(_ action: Selector, withSender sender: Any?) -> Bool { +// print("canPerformAction: \(action) with \(sender ?? "nil")") +// return true +// } } diff --git a/clients/ios/Classes/StoryDetailObjCViewController.h b/clients/ios/Classes/StoryDetailObjCViewController.h index 948d3dfbd5..28937ae5ac 100644 --- a/clients/ios/Classes/StoryDetailObjCViewController.h +++ b/clients/ios/Classes/StoryDetailObjCViewController.h @@ -11,13 +11,9 @@ #import "BaseViewController.h" @import WebKit; -@class NewsBlurAppDelegate; - @interface StoryDetailObjCViewController : BaseViewController { - NewsBlurAppDelegate *appDelegate; - NSString *activeStoryId; NSMutableDictionary *activeStory; UIView *innerView; @@ -34,7 +30,6 @@ UIActionSheetDelegate, WKNavigationDelegate> { UIInterfaceOrientation _orientation; } -@property (nonatomic) IBOutlet NewsBlurAppDelegate *appDelegate; @property (nonatomic) NSString *activeStoryId; @property (nonatomic, readwrite) NSMutableDictionary *activeStory; @property (nonatomic) IBOutlet UIView *innerView; diff --git a/clients/ios/Classes/StoryDetailObjCViewController.m b/clients/ios/Classes/StoryDetailObjCViewController.m index fe8c9b748f..96c31750c7 100644 --- a/clients/ios/Classes/StoryDetailObjCViewController.m +++ b/clients/ios/Classes/StoryDetailObjCViewController.m @@ -35,7 +35,6 @@ @interface StoryDetailObjCViewController () @implementation StoryDetailObjCViewController -@synthesize appDelegate; @synthesize activeStoryId; @synthesize activeStory; @synthesize innerView; @@ -71,8 +70,6 @@ - (NSString *)description { - (void)viewDidLoad { [super viewDidLoad]; - self.appDelegate = [NewsBlurAppDelegate sharedAppDelegate]; - self.view.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight; AVAudioSession *audioSession = [AVAudioSession sharedInstance]; @@ -314,6 +311,11 @@ - (void)viewWillDisappear:(BOOL)animated { - (void)viewWillAppear:(BOOL)animated { [super viewWillAppear:animated]; +#if TARGET_OS_MACCATALYST + [self.navigationController setNavigationBarHidden:YES animated:animated]; + [self.navigationController setToolbarHidden:YES animated:animated]; +#endif + if (!self.isPhoneOrCompact) { [appDelegate.feedDetailViewController.view endEditing:YES]; } @@ -1421,6 +1423,7 @@ - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(N } } +#if !TARGET_OS_MACCATALYST if (!isNavBarHidden && self.canHideNavigationBar && !nearTop) { [appDelegate.storyPagesViewController setNavigationBarHidden:YES]; } @@ -1428,6 +1431,7 @@ - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(N if (isNavBarHidden && pullingDown) { [appDelegate.storyPagesViewController setNavigationBarHidden:NO]; } +#endif if (!atTop && !atBottom && !singlePage) { BOOL traversalVisible = appDelegate.storyPagesViewController.traverseView.alpha > 0; diff --git a/clients/ios/Classes/StoryPagesObjCViewController.h b/clients/ios/Classes/StoryPagesObjCViewController.h index dcc1495b3c..81e2b1ff39 100644 --- a/clients/ios/Classes/StoryPagesObjCViewController.h +++ b/clients/ios/Classes/StoryPagesObjCViewController.h @@ -16,7 +16,6 @@ @interface StoryPagesObjCViewController : BaseViewController { - NewsBlurAppDelegate *appDelegate; THCircularProgressView *circularProgressView; UIButton *buttonPrevious; UIButton *buttonNext; @@ -37,7 +36,6 @@ CGFloat scrollPct; } -@property (nonatomic, strong) NewsBlurAppDelegate *appDelegate; @property (nonatomic) 
StoryDetailViewController *currentPage; @property (nonatomic) StoryDetailViewController *nextPage; @property (nonatomic) StoryDetailViewController *previousPage; diff --git a/clients/ios/Classes/StoryPagesObjCViewController.m b/clients/ios/Classes/StoryPagesObjCViewController.m index 2810dc0a2b..f7ca86ebc4 100644 --- a/clients/ios/Classes/StoryPagesObjCViewController.m +++ b/clients/ios/Classes/StoryPagesObjCViewController.m @@ -36,7 +36,6 @@ @interface StoryPagesObjCViewController () @implementation StoryPagesObjCViewController -@synthesize appDelegate; @synthesize currentPage, nextPage, previousPage; @synthesize circularProgressView; @synthesize separatorBarButton; @@ -76,7 +75,6 @@ - (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil - (void)viewDidLoad { [super viewDidLoad]; - appDelegate = [NewsBlurAppDelegate sharedAppDelegate]; currentPage = [[StoryDetailViewController alloc] initWithNibName:@"StoryDetailViewController" bundle:nil]; @@ -251,6 +249,11 @@ - (void)viewDidLoad { - (void)viewWillAppear:(BOOL)animated { [super viewWillAppear:animated]; +#if TARGET_OS_MACCATALYST + [self.navigationController setNavigationBarHidden:YES animated:animated]; + [self.navigationController setToolbarHidden:YES animated:animated]; +#endif + [self updateTheme]; [self updateAutoscrollButtons]; @@ -390,7 +393,10 @@ - (void)viewWillDisappear:(BOOL)animated { previousPage.view.hidden = YES; appDelegate.detailViewController.parentNavigationController.interactivePopGestureRecognizer.enabled = YES; + +#if !TARGET_OS_MACCATALYST [appDelegate.detailViewController.parentNavigationController setNavigationBarHidden:NO animated:YES]; +#endif self.autoscrollActive = NO; } @@ -484,7 +490,7 @@ - (void)setNavigationBarHidden:(BOOL)hide { } - (void)setNavigationBarHidden:(BOOL)hide alsoTraverse:(BOOL)alsoTraverse { - if (self.navigationController == nil || self.navigationController.navigationBarHidden == hide || self.currentlyTogglingNavigationBar) { + if (appDelegate.isMac || self.navigationController == nil || self.navigationController.navigationBarHidden == hide || self.currentlyTogglingNavigationBar) { return; } diff --git a/clients/ios/Classes/ToolbarDelegate.swift b/clients/ios/Classes/ToolbarDelegate.swift new file mode 100644 index 0000000000..18d012faf1 --- /dev/null +++ b/clients/ios/Classes/ToolbarDelegate.swift @@ -0,0 +1,71 @@ +// +// ToolbarDelegate.swift +// NewsBlur +// +// Created by David Sinclair on 2024-01-05. +// Copyright © 2024 NewsBlur. All rights reserved. +// + +import UIKit + +#if targetEnvironment(macCatalyst) +class ToolbarDelegate: NSObject { +} + +extension NSToolbarItem.Identifier { + static let reloadFeeds = NSToolbarItem.Identifier("com.newsblur.reloadFeeds") + static let feedDetailSettings = NSToolbarItem.Identifier("com.newsblur.feedDetailSettings") +} + +extension ToolbarDelegate: NSToolbarDelegate { + func toolbarDefaultItemIdentifiers(_ toolbar: NSToolbar) -> [NSToolbarItem.Identifier] { + let identifiers: [NSToolbarItem.Identifier] = [ + .toggleSidebar, + .reloadFeeds, + .flexibleSpace, + .feedDetailSettings + ] + return identifiers + } + + func toolbarAllowedItemIdentifiers(_ toolbar: NSToolbar) -> [NSToolbarItem.Identifier] { + return toolbarDefaultItemIdentifiers(toolbar) + } + + func toolbar(_ toolbar: NSToolbar, + itemForItemIdentifier itemIdentifier: NSToolbarItem.Identifier, + willBeInsertedIntoToolbar flag: Bool) -> NSToolbarItem? 
{ + switch itemIdentifier { + case .reloadFeeds: + return makeToolbarItem(itemIdentifier, + image: UIImage(systemName: "arrow.clockwise"), + label: "Reload Sites", + action: #selector(BaseViewController.reloadFeeds(_:))) + + case .feedDetailSettings: + return makeToolbarItem(itemIdentifier, + image: Utilities.imageNamed("settings", sized: 24), + label: "Site Settings", + action: #selector(FeedDetailViewController.doOpenSettingsMenu(_:))) + + default: + return nil + } + } + + func makeToolbarItem(_ identifier: NSToolbarItem.Identifier, + image: UIImage?, + label: String, + action: Selector, + target: AnyObject? = nil) -> NSToolbarItem { + let item = NSToolbarItem(itemIdentifier: identifier) + + item.image = image + item.label = label + item.action = action + item.target = target + + return item + } +} +#endif diff --git a/clients/ios/Classes/TrainerViewController.h b/clients/ios/Classes/TrainerViewController.h index 75cd5eb59c..d9708988bf 100644 --- a/clients/ios/Classes/TrainerViewController.h +++ b/clients/ios/Classes/TrainerViewController.h @@ -22,8 +22,6 @@ @interface TrainerViewController : BaseViewController { - NewsBlurAppDelegate *appDelegate; - IBOutlet UIBarButtonItem * closeButton; TrainerWebView *webView; IBOutlet UINavigationBar *navBar; @@ -33,7 +31,6 @@ BOOL storyTrainer; } -@property (nonatomic) IBOutlet NewsBlurAppDelegate *appDelegate; @property (nonatomic) IBOutlet UIBarButtonItem *closeButton; @property (nonatomic) IBOutlet TrainerWebView *webView; @property (nonatomic) IBOutlet UINavigationBar *navBar; diff --git a/clients/ios/Classes/TrainerViewController.m b/clients/ios/Classes/TrainerViewController.m index 76d7c682e2..ba33845050 100644 --- a/clients/ios/Classes/TrainerViewController.m +++ b/clients/ios/Classes/TrainerViewController.m @@ -17,7 +17,6 @@ @implementation TrainerViewController @synthesize closeButton; @synthesize webView; @synthesize navBar; -@synthesize appDelegate; @synthesize feedTrainer; @synthesize storyTrainer; @synthesize feedLoaded; @@ -35,8 +34,6 @@ - (void)viewDidLoad { [super viewDidLoad]; - self.appDelegate = [NewsBlurAppDelegate sharedAppDelegate]; - UIBarButtonItem *done = [[UIBarButtonItem alloc] initWithTitle:@"Done Training" style:UIBarButtonItemStyleDone diff --git a/clients/ios/Classes/UserProfileViewController.h b/clients/ios/Classes/UserProfileViewController.h index b8240b36e0..d54f3c3094 100644 --- a/clients/ios/Classes/UserProfileViewController.h +++ b/clients/ios/Classes/UserProfileViewController.h @@ -10,13 +10,10 @@ #import "NewsBlurAppDelegate.h" #import "NewsBlur-Swift.h" -@class NewsBlurAppDelegate; @class ProfileBadge; @interface UserProfileViewController : BaseViewController { - NewsBlurAppDelegate *appDelegate; - UILabel *followingCount; UILabel *followersCount; ProfileBadge *profileBadge; @@ -26,7 +23,6 @@ NSDictionary *userProfile; } -@property (nonatomic) NewsBlurAppDelegate *appDelegate; @property (nonatomic) ProfileBadge *profileBadge; @property (nonatomic) UITableView *profileTable; @property (nonatomic) NSArray *activitiesArray; diff --git a/clients/ios/Classes/UserProfileViewController.m b/clients/ios/Classes/UserProfileViewController.m index 2f56db4902..bbcb818418 100644 --- a/clients/ios/Classes/UserProfileViewController.m +++ b/clients/ios/Classes/UserProfileViewController.m @@ -17,7 +17,6 @@ @implementation UserProfileViewController -@synthesize appDelegate; @synthesize profileBadge; @synthesize profileTable; @synthesize activitiesArray; @@ -40,9 +39,7 @@ - (void)dealloc { - (void)viewDidLoad { [super 
viewDidLoad]; - // Do any additional setup after loading the view from its nib. - self.appDelegate = (NewsBlurAppDelegate *)[[UIApplication sharedApplication] delegate]; - + UITableView *profiles = [[UITableView alloc] initWithFrame:CGRectMake(0, 0, self.view.frame.size.width, self.view.frame.size.height) style:UITableViewStyleGrouped]; self.profileTable = profiles; self.profileTable.dataSource = self; @@ -89,7 +86,6 @@ - (void)getUserProfile { // self.view.frame = self.view.bounds; self.preferredContentSize = CGSizeMake(320, 454); - self.appDelegate = (NewsBlurAppDelegate *)[[UIApplication sharedApplication] delegate]; [MBProgressHUD hideHUDForView:self.view animated:YES]; MBProgressHUD *HUD = [MBProgressHUD showHUDAddedTo:self.view animated:YES]; HUD.labelText = @"Profiling..."; diff --git a/clients/ios/NewsBlur.xcodeproj/project.pbxproj b/clients/ios/NewsBlur.xcodeproj/project.pbxproj index 35052ea1f2..b397e7a285 100755 --- a/clients/ios/NewsBlur.xcodeproj/project.pbxproj +++ b/clients/ios/NewsBlur.xcodeproj/project.pbxproj @@ -753,6 +753,8 @@ 1788939D249332E6004CBA4E /* g_icn_search.png in Resources */ = {isa = PBXBuildFile; fileRef = 1788939C249332E6004CBA4E /* g_icn_search.png */; }; 1791C21526C4C7BC00D815AA /* WidgetStoryView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1791C21426C4C7BC00D815AA /* WidgetStoryView.swift */; }; 17997C5827A8FDD100483E69 /* WidgetDebugTimer.swift in Sources */ = {isa = PBXBuildFile; fileRef = 17997C5727A8FDD100483E69 /* WidgetDebugTimer.swift */; }; + 179A88022B48E64A00916CF4 /* ToolbarDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 179A88012B48E64900916CF4 /* ToolbarDelegate.swift */; }; + 179A88032B48E64A00916CF4 /* ToolbarDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 179A88012B48E64900916CF4 /* ToolbarDelegate.swift */; }; 179DD9CF23DFDD51007BFD21 /* CloudKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 179DD9CE23DFDD51007BFD21 /* CloudKit.framework */; }; 17A396D924F86A8F0023C9E2 /* MainInterface.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 17A396D824F86A8F0023C9E2 /* MainInterface.storyboard */; }; 17A92A3C289B7C6B00AB0A78 /* saved-stories@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 17A92A3B289B7C6B00AB0A78 /* saved-stories@2x.png */; }; @@ -1534,6 +1536,7 @@ 1788939C249332E6004CBA4E /* g_icn_search.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = g_icn_search.png; sourceTree = ""; }; 1791C21426C4C7BC00D815AA /* WidgetStoryView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WidgetStoryView.swift; sourceTree = ""; }; 17997C5727A8FDD100483E69 /* WidgetDebugTimer.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WidgetDebugTimer.swift; sourceTree = ""; }; + 179A88012B48E64900916CF4 /* ToolbarDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ToolbarDelegate.swift; sourceTree = ""; }; 179DD9CC23DFD20E007BFD21 /* BridgingHeader.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; name = BridgingHeader.h; path = "Other Sources/BridgingHeader.h"; sourceTree = ""; }; 179DD9CE23DFDD51007BFD21 /* CloudKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CloudKit.framework; path = System/Library/Frameworks/CloudKit.framework; sourceTree = SDKROOT; }; 17A396D824F86A8F0023C9E2 /* MainInterface.storyboard */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; path = MainInterface.storyboard; 
sourceTree = ""; }; @@ -3329,6 +3332,7 @@ 1D3623240D0F684500981E51 /* NewsBlurAppDelegate.h */, 1D3623250D0F684500981E51 /* NewsBlurAppDelegate.m */, 17150E1D2B05775A004D5309 /* SceneDelegate.swift */, + 179A88012B48E64900916CF4 /* ToolbarDelegate.swift */, FFD1D72F1459B63500E46F89 /* BaseViewController.h */, FFD1D7301459B63500E46F89 /* BaseViewController.m */, 17C074941C14C46B00CFCDB7 /* ThemeManager.h */, @@ -5026,6 +5030,7 @@ 1757926D2930605500490924 /* UserTagsViewController.m in Sources */, 1757926E2930605500490924 /* StringHelper.m in Sources */, 1757926F2930605500490924 /* TransparentToolbar.m in Sources */, + 179A88032B48E64A00916CF4 /* ToolbarDelegate.swift in Sources */, 175792702930605500490924 /* THCircularProgressView.m in Sources */, 175792712930605500490924 /* IASKSpecifier.m in Sources */, 175792722930605500490924 /* UIView+ViewController.m in Sources */, @@ -5226,6 +5231,7 @@ FF6282151A11613900271FDB /* UserTagsViewController.m in Sources */, 43A4C3E315B00966008787B5 /* StringHelper.m in Sources */, 43A4C3E415B00966008787B5 /* TransparentToolbar.m in Sources */, + 179A88022B48E64A00916CF4 /* ToolbarDelegate.swift in Sources */, FFD6604C1BACA45D006E4B8D /* THCircularProgressView.m in Sources */, FF34FD681E9D93CB0062F8ED /* IASKSpecifier.m in Sources */, FFA0484419CA73B700618DC4 /* UIView+ViewController.m in Sources */, diff --git a/clients/ios/Resources/MainInterface.storyboard b/clients/ios/Resources/MainInterface.storyboard index b0a2332666..fe156d68f6 100644 --- a/clients/ios/Resources/MainInterface.storyboard +++ b/clients/ios/Resources/MainInterface.storyboard @@ -267,7 +267,7 @@ - + From 012af7fe54ae3af2af4702c2e9106b861bafce73 Mon Sep 17 00:00:00 2001 From: David Sinclair Date: Thu, 18 Jan 2024 10:10:50 -0500 Subject: [PATCH 13/69] Build prep --- clients/ios/NewsBlur.xcodeproj/project.pbxproj | 10 ++-------- clients/ios/Resources/MainInterface.storyboard | 4 ++-- 2 files changed, 4 insertions(+), 10 deletions(-) diff --git a/clients/ios/NewsBlur.xcodeproj/project.pbxproj b/clients/ios/NewsBlur.xcodeproj/project.pbxproj index 18d2dcefc6..9fbb5b996a 100755 --- a/clients/ios/NewsBlur.xcodeproj/project.pbxproj +++ b/clients/ios/NewsBlur.xcodeproj/project.pbxproj @@ -5503,7 +5503,6 @@ CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; COPY_PHASE_STRIP = NO; - CURRENT_PROJECT_VERSION = 152; DEVELOPMENT_TEAM = HR7P97SD72; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", @@ -5555,7 +5554,6 @@ CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; COPY_PHASE_STRIP = YES; - CURRENT_PROJECT_VERSION = 152; DEVELOPMENT_TEAM = HR7P97SD72; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", @@ -5698,7 +5696,6 @@ CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; COPY_PHASE_STRIP = NO; - CURRENT_PROJECT_VERSION = 152; DEVELOPMENT_TEAM = HR7P97SD72; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", @@ -5732,7 +5729,6 @@ SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; SUPPORTS_MACCATALYST = NO; SWIFT_OBJC_BRIDGING_HEADER = "Other Sources/BridgingHeader.h"; - TARGETED_DEVICE_FAMILY = "1,2,6"; "WARNING_CFLAGS[arch=*]" = "-Wall"; }; name = Debug; @@ -5748,7 +5744,6 @@ CODE_SIGN_IDENTITY = "iPhone Developer"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer"; COPY_PHASE_STRIP = YES; - CURRENT_PROJECT_VERSION = 152; DEVELOPMENT_TEAM = HR7P97SD72; FRAMEWORK_SEARCH_PATHS = ( "$(inherited)", @@ -5779,7 +5774,6 @@ SUPPORTED_PLATFORMS = "iphoneos iphonesimulator"; 
SUPPORTS_MACCATALYST = NO; SWIFT_OBJC_BRIDGING_HEADER = "Other Sources/BridgingHeader.h"; - TARGETED_DEVICE_FAMILY = "1,2,6"; VALIDATE_PRODUCT = YES; }; name = Release; @@ -5810,7 +5804,7 @@ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; CODE_SIGN_IDENTITY = "iPhone Developer"; COPY_PHASE_STRIP = NO; - CURRENT_PROJECT_VERSION = 152; + CURRENT_PROJECT_VERSION = 153; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; ENABLE_STRICT_OBJC_MSGSEND = YES; ENABLE_TESTABILITY = YES; @@ -5865,7 +5859,7 @@ CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; CODE_SIGN_IDENTITY = "iPhone Developer"; COPY_PHASE_STRIP = NO; - CURRENT_PROJECT_VERSION = 152; + CURRENT_PROJECT_VERSION = 153; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; ENABLE_STRICT_OBJC_MSGSEND = YES; GCC_C_LANGUAGE_STANDARD = "compiler-default"; diff --git a/clients/ios/Resources/MainInterface.storyboard b/clients/ios/Resources/MainInterface.storyboard index 6361ca2c57..40ddffdeeb 100644 --- a/clients/ios/Resources/MainInterface.storyboard +++ b/clients/ios/Resources/MainInterface.storyboard @@ -1,9 +1,9 @@ - + - + From 46bdd7214f763f368351a28757b8a0118fa24646 Mon Sep 17 00:00:00 2001 From: David Sinclair Date: Thu, 18 Jan 2024 22:07:39 -0500 Subject: [PATCH 14/69] #1247 (Mac Catalyst edition) - The nav bar buttons are now implemented in a NSToolbar. - The popovers work as expected from those buttons. - Even supports different positions when the sidebar is hidden. --- .../Classes/FeedDetailObjCViewController.m | 30 +++++++++++++++++-- clients/ios/Classes/MenuViewController.h | 1 + clients/ios/Classes/MenuViewController.m | 6 ++++ clients/ios/Classes/NewsBlurAppDelegate.h | 1 + clients/ios/Classes/NewsBlurAppDelegate.m | 20 ++++++++++--- clients/ios/Classes/SplitViewController.swift | 4 +++ .../Classes/StoryPagesObjCViewController.m | 13 ++++++++ .../Classes/StoryPagesViewController.swift | 8 +++++ clients/ios/Classes/ToolbarDelegate.swift | 28 ++++++++++++++++- clients/ios/Other Sources/BridgingHeader.h | 1 + .../Resources/mac/Settings.bundle/Root.plist | 8 ++--- 11 files changed, 107 insertions(+), 13 deletions(-) diff --git a/clients/ios/Classes/FeedDetailObjCViewController.m b/clients/ios/Classes/FeedDetailObjCViewController.m index cbbf4dfdf8..840d503f32 100644 --- a/clients/ios/Classes/FeedDetailObjCViewController.m +++ b/clients/ios/Classes/FeedDetailObjCViewController.m @@ -2400,6 +2400,21 @@ - (void)markReadShowMenu:(MarkReadShowMenu)showMenu sender:(id)sender { visibleUnreadCount = 0; } +#if TARGET_OS_MACCATALYST + UINavigationController *feedDetailNavController = appDelegate.feedDetailViewController.navigationController; + UIView *sourceView = feedDetailNavController.view; + CGRect sourceRect = CGRectMake(120, 0, 20, 20); + + if (appDelegate.splitViewController.isFeedListHidden) { + sourceRect = CGRectMake(192, 0, 20, 20); + } + + [self.appDelegate showMarkReadMenuWithFeedIds:feedIds collectionTitle:collectionTitle visibleUnreadCount:visibleUnreadCount sourceView:sourceView sourceRect:sourceRect completionHandler:^(BOOL marked){ + if (marked) { + pop(); + } + }]; +#else UIBarButtonItem *barButton = self.feedMarkReadButton; if (sender && [sender isKindOfClass:[UIBarButtonItem class]]) barButton = sender; @@ -2408,6 +2423,7 @@ - (void)markReadShowMenu:(MarkReadShowMenu)showMenu sender:(id)sender { pop(); } }]; +#endif } - (IBAction)doOpenMarkReadMenu:(id)sender { @@ -2626,11 +2642,19 @@ - (IBAction)doOpenSettingsMenu:(id)sender { [viewController addThemeSegmentedControl]; -#if TARGET_OS_MACCATALYST - //TODO: 🚧 -#else UINavigationController 
*navController = self.navigationController ?: appDelegate.storyPagesViewController.navigationController; +#if TARGET_OS_MACCATALYST + UINavigationController *feedDetailNavController = appDelegate.feedDetailViewController.navigationController; + UIView *sourceView = feedDetailNavController.view; + CGRect sourceRect = CGRectMake(152, 0, 20, 20); + + if (appDelegate.splitViewController.isFeedListHidden) { + sourceRect = CGRectMake(224, 0, 20, 20); + } + + [viewController showFromNavigationController:navController barButtonItem:nil sourceView:sourceView sourceRect:sourceRect permittedArrowDirections:UIPopoverArrowDirectionDown]; +#else [viewController showFromNavigationController:navController barButtonItem:self.settingsBarButton]; #endif } diff --git a/clients/ios/Classes/MenuViewController.h b/clients/ios/Classes/MenuViewController.h index 11728088e1..3eed9c9f94 100644 --- a/clients/ios/Classes/MenuViewController.h +++ b/clients/ios/Classes/MenuViewController.h @@ -30,5 +30,6 @@ typedef void (^MenuItemSegmentedHandler)(NSUInteger selectedIndex); - (void)showFromNavigationController:(UINavigationController *)navigationController barButtonItem:(UIBarButtonItem *)barButtonItem; - (void)showFromNavigationController:(UINavigationController *)navigationController barButtonItem:(UIBarButtonItem *)barButtonItem permittedArrowDirections:(UIPopoverArrowDirection)permittedArrowDirections; +- (void)showFromNavigationController:(UINavigationController *)navigationController barButtonItem:(UIBarButtonItem *)barButtonItem sourceView:(UIView *)sourceView sourceRect:(CGRect)sourceRect permittedArrowDirections:(UIPopoverArrowDirection)permittedArrowDirections; @end diff --git a/clients/ios/Classes/MenuViewController.m b/clients/ios/Classes/MenuViewController.m index 918d64c637..4d15ed887e 100644 --- a/clients/ios/Classes/MenuViewController.m +++ b/clients/ios/Classes/MenuViewController.m @@ -284,6 +284,10 @@ - (void)showFromNavigationController:(UINavigationController *)navigationControl } - (void)showFromNavigationController:(UINavigationController *)navigationController barButtonItem:(UIBarButtonItem *)barButtonItem permittedArrowDirections:(UIPopoverArrowDirection)permittedArrowDirections { + [self showFromNavigationController:navigationController barButtonItem:barButtonItem sourceView:nil sourceRect:CGRectZero permittedArrowDirections:permittedArrowDirections]; +} + +- (void)showFromNavigationController:(UINavigationController *)navigationController barButtonItem:(UIBarButtonItem *)barButtonItem sourceView:(UIView *)sourceView sourceRect:(CGRect)sourceRect permittedArrowDirections:(UIPopoverArrowDirection)permittedArrowDirections { UIViewController *presentedViewController = navigationController.presentedViewController; if (presentedViewController && presentedViewController.presentationController.presentationStyle == UIModalPresentationPopover) { [presentedViewController dismissViewControllerAnimated:YES completion:nil]; @@ -300,6 +304,8 @@ - (void)showFromNavigationController:(UINavigationController *)navigationControl popoverPresentationController.backgroundColor = UIColorFromRGB(NEWSBLUR_WHITE_COLOR); popoverPresentationController.permittedArrowDirections = permittedArrowDirections; popoverPresentationController.barButtonItem = barButtonItem; + popoverPresentationController.sourceView = sourceView; + popoverPresentationController.sourceRect = sourceRect; [navigationController presentViewController:embeddedNavController animated:YES completion:nil]; } diff --git 
a/clients/ios/Classes/NewsBlurAppDelegate.h b/clients/ios/Classes/NewsBlurAppDelegate.h index 9aefe16a9d..cebcb9560c 100644 --- a/clients/ios/Classes/NewsBlurAppDelegate.h +++ b/clients/ios/Classes/NewsBlurAppDelegate.h @@ -438,6 +438,7 @@ SFSafariViewControllerDelegate> { - (void)renameFolder:(NSString *)newTitle; - (void)showMarkReadMenuWithFeedIds:(NSArray *)feedIds collectionTitle:(NSString *)collectionTitle visibleUnreadCount:(NSInteger)visibleUnreadCount barButtonItem:(UIBarButtonItem *)barButtonItem completionHandler:(void (^)(BOOL marked))completionHandler; +- (void)showMarkReadMenuWithFeedIds:(NSArray *)feedIds collectionTitle:(NSString *)collectionTitle visibleUnreadCount:(NSInteger)visibleUnreadCount sourceView:(UIView *)sourceView sourceRect:(CGRect)sourceRect completionHandler:(void (^)(BOOL marked))completionHandler; - (void)showMarkReadMenuWithFeedIds:(NSArray *)feedIds collectionTitle:(NSString *)collectionTitle sourceView:(UIView *)sourceView sourceRect:(CGRect)sourceRect completionHandler:(void (^)(BOOL marked))completionHandler; - (void)showMarkOlderNewerReadMenuWithStoriesCollection:(StoriesCollection *)olderNewerCollection story:(NSDictionary *)olderNewerStory sourceView:(UIView *)sourceView sourceRect:(CGRect)sourceRect extraItems:(NSArray *)extraItems completionHandler:(void (^)(BOOL marked))completionHandler; diff --git a/clients/ios/Classes/NewsBlurAppDelegate.m b/clients/ios/Classes/NewsBlurAppDelegate.m index d0a120e070..05c5966518 100644 --- a/clients/ios/Classes/NewsBlurAppDelegate.m +++ b/clients/ios/Classes/NewsBlurAppDelegate.m @@ -1441,7 +1441,7 @@ - (UIModalPresentationStyle)adaptivePresentationStyleForPresentationController:( } - (void)presentationControllerDidDismiss:(UIPresentationController *)presentationController { - [self.feedsNavigationController.topViewController becomeFirstResponder]; +// [self.feedsNavigationController.topViewController becomeFirstResponder]; } #pragma mark - Network @@ -2291,7 +2291,9 @@ - (void)showOriginalStory:(NSURL *)url sender:(id)sender { } NSString *storyBrowser = [preferences stringForKey:@"story_browser"]; - if ([storyBrowser isEqualToString:@"safari"]) { + + if ([storyBrowser isEqualToString:@"system"] || [storyBrowser isEqualToString:@"safari"]) { + // There is no way to force opening in Safari if the default browser on macOS is not Safari. 
[[UIApplication sharedApplication] openURL:url options:@{} completionHandler:nil]; // [[UIApplication sharedApplication] openURL:url]; return; @@ -2353,6 +2355,9 @@ - (void)showOriginalStory:(NSURL *)url sender:(id)sender { } - (void)showInAppBrowser:(NSURL *)url withCustomTitle:(NSString *)customTitle fromSender:(id)sender { +#if TARGET_OS_MACCATALYST + [[UIApplication sharedApplication] openURL:url options:@{} completionHandler:nil]; +#else if (!originalStoryViewController) { originalStoryViewController = [[OriginalStoryViewController alloc] init]; } @@ -2385,9 +2390,13 @@ - (void)showInAppBrowser:(NSURL *)url withCustomTitle:(NSString *)customTitle fr [originalStoryViewController loadInitialStory]; [feedsNavigationController showViewController:originalStoryViewController sender:self]; } +#endif } - (void)showSafariViewControllerWithURL:(NSURL *)url useReader:(BOOL)useReader { +#if TARGET_OS_MACCATALYST + [[UIApplication sharedApplication] openURL:url options:@{} completionHandler:nil]; +#else SFSafariViewControllerConfiguration *config = [SFSafariViewControllerConfiguration new]; config.entersReaderIfAvailable = useReader; @@ -2399,9 +2408,8 @@ - (void)showSafariViewControllerWithURL:(NSURL *)url useReader:(BOOL)useReader { self.safariViewController = [[SFSafariViewController alloc] initWithURL:url configuration:config]; self.safariViewController.delegate = self; -#if TARGET_OS_MACCATALYST -#else [self.storyPagesViewController setNavigationBarHidden:NO]; + [feedsNavigationController presentViewController:self.safariViewController animated:YES completion:nil]; #endif } @@ -3274,6 +3282,10 @@ - (void)showMarkReadMenuWithFeedIds:(NSArray *)feedIds collectionTitle:(NSString [self showMarkReadMenuWithFeedIds:feedIds collectionTitle:collectionTitle visibleUnreadCount:0 olderNewerCollection:nil olderNewerStory:nil barButtonItem:nil sourceView:sourceView sourceRect:sourceRect extraItems:nil completionHandler:completionHandler]; } +- (void)showMarkReadMenuWithFeedIds:(NSArray *)feedIds collectionTitle:(NSString *)collectionTitle visibleUnreadCount:(NSInteger)visibleUnreadCount sourceView:(UIView *)sourceView sourceRect:(CGRect)sourceRect completionHandler:(void (^)(BOOL marked))completionHandler { + [self showMarkReadMenuWithFeedIds:feedIds collectionTitle:collectionTitle visibleUnreadCount:visibleUnreadCount olderNewerCollection:nil olderNewerStory:nil barButtonItem:nil sourceView:sourceView sourceRect:sourceRect extraItems:nil completionHandler:completionHandler]; +} + - (void)showMarkOlderNewerReadMenuWithStoriesCollection:(StoriesCollection *)olderNewerCollection story:(NSDictionary *)olderNewerStory sourceView:(UIView *)sourceView sourceRect:(CGRect)sourceRect extraItems:(NSArray *)extraItems completionHandler:(void (^)(BOOL marked))completionHandler { [self showMarkReadMenuWithFeedIds:nil collectionTitle:nil visibleUnreadCount:0 olderNewerCollection:storiesCollection olderNewerStory:olderNewerStory barButtonItem:nil sourceView:sourceView sourceRect:sourceRect extraItems:extraItems completionHandler:completionHandler]; } diff --git a/clients/ios/Classes/SplitViewController.swift b/clients/ios/Classes/SplitViewController.swift index 6cb8037405..28279c62d4 100644 --- a/clients/ios/Classes/SplitViewController.swift +++ b/clients/ios/Classes/SplitViewController.swift @@ -10,6 +10,10 @@ import UIKit /// Subclass of `UISplitViewController` to enable customizations. 
class SplitViewController: UISplitViewController { + @objc var isFeedListHidden: Bool { + return [.oneBesideSecondary, .oneOverSecondary, .secondaryOnly].contains(displayMode) + } + /// Update the theme of the split view controller. @objc func updateTheme() { diff --git a/clients/ios/Classes/StoryPagesObjCViewController.m b/clients/ios/Classes/StoryPagesObjCViewController.m index f7ca86ebc4..e7035e070f 100644 --- a/clients/ios/Classes/StoryPagesObjCViewController.m +++ b/clients/ios/Classes/StoryPagesObjCViewController.m @@ -1514,13 +1514,26 @@ - (BOOL)canPerformAction:(SEL)action withSender:(id)sender { #pragma mark - #pragma mark Styles +//- (BOOL)validateToolbarItem:(NSToolbarItem *)item { +// if item.itemIdentifier == +// return !self.currentPage.view.isHidden; +//} - (IBAction)toggleFontSize:(id)sender { UINavigationController *fontSettingsNavigationController = appDelegate.fontSettingsNavigationController; [fontSettingsNavigationController popToRootViewControllerAnimated:NO]; // [appDelegate showPopoverWithViewController:fontSettingsNavigationController contentSize:CGSizeZero sourceNavigationController:self.navigationController barButtonItem:self.fontSettingsButton sourceView:nil sourceRect:CGRectZero permittedArrowDirections:UIPopoverArrowDirectionAny]; + +#if TARGET_OS_MACCATALYST + UINavigationController *storiesNavController = appDelegate.storyPagesViewController.navigationController; + UIView *sourceView = storiesNavController.view; + CGRect sourceRect = CGRectMake(storiesNavController.view.frame.size.width - 59, 0, 20, 20); + + [appDelegate showPopoverWithViewController:fontSettingsNavigationController contentSize:CGSizeZero sourceView:sourceView sourceRect:sourceRect]; +#else [appDelegate showPopoverWithViewController:fontSettingsNavigationController contentSize:CGSizeZero barButtonItem:self.fontSettingsButton]; +#endif } - (void)setFontStyle:(NSString *)fontStyle { diff --git a/clients/ios/Classes/StoryPagesViewController.swift b/clients/ios/Classes/StoryPagesViewController.swift index 9842e6dcb5..c8d2aea89e 100644 --- a/clients/ios/Classes/StoryPagesViewController.swift +++ b/clients/ios/Classes/StoryPagesViewController.swift @@ -22,4 +22,12 @@ class StoryPagesViewController: StoryPagesObjCViewController { @objc func reloadWidget() { WidgetCenter.shared.reloadAllTimelines() } + + @objc func validateToolbarItem(_ item: NSToolbarItem) -> Bool { + if [.storyPagesSettings, .storyPagesBrowser].contains(item.itemIdentifier) { + return !self.currentPage.view.isHidden && self.currentPage.noStoryMessage.isHidden + } else { + return true + } + } } diff --git a/clients/ios/Classes/ToolbarDelegate.swift b/clients/ios/Classes/ToolbarDelegate.swift index 18d012faf1..5bafe70bd5 100644 --- a/clients/ios/Classes/ToolbarDelegate.swift +++ b/clients/ios/Classes/ToolbarDelegate.swift @@ -14,16 +14,24 @@ class ToolbarDelegate: NSObject { extension NSToolbarItem.Identifier { static let reloadFeeds = NSToolbarItem.Identifier("com.newsblur.reloadFeeds") + static let feedDetailUnread = NSToolbarItem.Identifier("com.newsblur.feedDetailUnread") static let feedDetailSettings = NSToolbarItem.Identifier("com.newsblur.feedDetailSettings") + static let storyPagesSettings = NSToolbarItem.Identifier("com.newsblur.storyPagesSettings") + static let storyPagesBrowser = NSToolbarItem.Identifier("com.newsblur.storyPagesBrowser") } extension ToolbarDelegate: NSToolbarDelegate { func toolbarDefaultItemIdentifiers(_ toolbar: NSToolbar) -> [NSToolbarItem.Identifier] { let identifiers: [NSToolbarItem.Identifier] = [ 
            .toggleSidebar,
+           .space,
            .reloadFeeds,
+           .space,
+           .feedDetailUnread,
+           .feedDetailSettings,
            .flexibleSpace,
-           .feedDetailSettings
+           .storyPagesSettings,
+           .storyPagesBrowser
        ]
        return identifiers
    }
@@ -42,12 +50,30 @@ extension ToolbarDelegate: NSToolbarDelegate {
                                   label: "Reload Sites",
                                   action: #selector(BaseViewController.reloadFeeds(_:)))

+        case .feedDetailUnread:
+            return makeToolbarItem(itemIdentifier,
+                                   image: Utilities.imageNamed("mark-read", sized: 24),
+                                   label: "Mark as Read",
+                                   action: #selector(FeedDetailViewController.doOpenMarkReadMenu(_:)))
+
        case .feedDetailSettings:
            return makeToolbarItem(itemIdentifier,
                                   image: Utilities.imageNamed("settings", sized: 24),
                                   label: "Site Settings",
                                   action: #selector(FeedDetailViewController.doOpenSettingsMenu(_:)))

+        case .storyPagesSettings:
+            return makeToolbarItem(itemIdentifier,
+                                   image: Utilities.imageNamed("settings", sized: 24),
+                                   label: "Story Settings",
+                                   action: #selector(StoryPagesViewController.toggleFontSize(_:)))
+
+        case .storyPagesBrowser:
+            return makeToolbarItem(itemIdentifier,
+                                   image: Utilities.imageNamed("original_button.png", sized: 24),
+                                   label: "Show Original Story",
+                                   action: #selector(StoryPagesViewController.showOriginalSubview(_:)))
+
        default:
            return nil
        }
diff --git a/clients/ios/Other Sources/BridgingHeader.h b/clients/ios/Other Sources/BridgingHeader.h
index ca30b83223..1bb179a32d 100644
--- a/clients/ios/Other Sources/BridgingHeader.h
+++ b/clients/ios/Other Sources/BridgingHeader.h
@@ -11,6 +11,7 @@
 #import
 #import "NSString+HTML.h"
+#import "Utilities.h"
 #import "NewsBlurAppDelegate.h"
 #import "ThemeManager.h"
 #import "StoriesCollection.h"
diff --git a/clients/ios/Resources/mac/Settings.bundle/Root.plist b/clients/ios/Resources/mac/Settings.bundle/Root.plist
index d6dc0001a6..fb090081d0 100644
--- a/clients/ios/Resources/mac/Settings.bundle/Root.plist
+++ b/clients/ios/Resources/mac/Settings.bundle/Root.plist
@@ -119,8 +119,7 @@
 Default browser
 Titles
-In-app browser
-Safari
+System Default
 Chrome
 Opera Mini
 Firefox
@@ -128,11 +127,10 @@
 Brave
 DefaultValue
-inapp
+system
 Values
-inapp
-safari
+system
 chrome
 opera_mini
 firefox

From 1d68bc889bdb20dd8f2516f89eb27ea86a94838f Mon Sep 17 00:00:00 2001
From: David Sinclair
Date: Fri, 19 Jan 2024 21:44:29 -0500
Subject: [PATCH 15/69] #1247 (Mac Catalyst edition)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

- Added the activity button to the user info above the feeds list.
- Now validates the menu items based on visible views.
- The View > Columns etc submenus now include checkmarks for their current state.
- The Feed menu items are now disabled for Everything and folders etc as appropriate.
- The Story > Save This Story and Mark as Read menu items toggle their titles as appropriate.
- Fixed the Notifications, Statistics, and other panels not appearing.
- Removed the View > Hide Toolbar menu command, since it doesn’t make sense to do so.
- Fixed the no story message being hidden when it shouldn’t be.
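
The enabling and checkmark logic follows UIKit's standard responder validation: canPerformAction(_:withSender:) decides whether a menu item is enabled, and validate(_:) adjusts checkmark state and titles before display. A minimal Swift sketch of that pattern; the view controller, flags, defaults keys, and selectors below are illustrative placeholders, not the app's actual API:

    import UIKit

    class MenuHostViewController: UIViewController {
        // Placeholder state standing in for the app's visibility checks.
        var isFeedShown = true
        var storyIsSaved = false

        // Disable feed-specific menu items when no feed is shown.
        override func canPerformAction(_ action: Selector, withSender sender: Any?) -> Bool {
            if action == #selector(openTrainSite(_:)) {
                return isFeedShown
            }
            return super.canPerformAction(action, withSender: sender)
        }

        // Show a checkmark on the current choice and toggle command titles.
        override func validate(_ command: UICommand) {
            super.validate(command)
            if command.action == #selector(chooseTheme(_:)) {
                let current = UserDefaults.standard.string(forKey: "theme") ?? "light"
                command.state = (command.propertyList as? String) == current ? .on : .off
            } else if command.action == #selector(toggleStorySaved(_:)) {
                command.title = storyIsSaved ? "Unsave This Story" : "Save This Story"
            }
        }

        @objc func openTrainSite(_ sender: Any?) { /* training UI would go here */ }
        @objc func chooseTheme(_ sender: Any?) { /* apply the chosen theme here */ }
        @objc func toggleStorySaved(_ sender: Any?) { storyIsSaved.toggle() }
    }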
--- clients/ios/Classes/BaseViewController.h | 4 ++ clients/ios/Classes/BaseViewController.m | 60 +++++++++++++++++++ .../ios/Classes/DetailViewController.swift | 7 ++- .../Classes/FeedDetailObjCViewController.m | 8 +-- clients/ios/Classes/FeedsObjCViewController.h | 1 + clients/ios/Classes/FeedsObjCViewController.m | 26 ++++---- clients/ios/Classes/NewsBlurAppDelegate.m | 18 ++++++ clients/ios/Classes/StoriesCollection.h | 2 + clients/ios/Classes/StoriesCollection.m | 8 +++ .../Classes/StoryDetailObjCViewController.m | 4 +- .../Classes/StoryPagesObjCViewController.h | 2 - .../Classes/StoryPagesObjCViewController.m | 17 ++---- .../Classes/StoryPagesViewController.swift | 2 +- .../ios/NewsBlur.xcodeproj/project.pbxproj | 2 +- .../xcschemes/Alpha Widget Extension.xcscheme | 2 +- .../xcschemes/NewsBlur Alpha.xcscheme | 2 +- .../xcshareddata/xcschemes/NewsBlur.xcscheme | 2 +- .../xcschemes/Old Widget Extension.xcscheme | 2 +- .../xcschemes/Share Extension.xcscheme | 2 +- ...ry Notification Service Extension.xcscheme | 2 +- .../xcschemes/Widget Extension.xcscheme | 2 +- .../ios/Resources/MainInterface.storyboard | 8 ++- 22 files changed, 141 insertions(+), 42 deletions(-) diff --git a/clients/ios/Classes/BaseViewController.h b/clients/ios/Classes/BaseViewController.h index 717b2f91d6..bfdb7e1062 100644 --- a/clients/ios/Classes/BaseViewController.h +++ b/clients/ios/Classes/BaseViewController.h @@ -14,6 +14,8 @@ @property (nonatomic, readonly) BOOL isVision; @property (nonatomic, readonly) BOOL isPortrait; @property (nonatomic, readonly) BOOL isCompactWidth; +@property (nonatomic, readonly) BOOL isFeedShown; +@property (nonatomic, readonly) BOOL isStoryShown; - (void)informError:(id)error; - (void)informError:(id)error statusCode:(NSInteger)statusCode; @@ -49,6 +51,8 @@ - (IBAction)chooseFontSize:(id)sender; - (IBAction)chooseSpacing:(id)sender; - (IBAction)chooseTheme:(id)sender; +- (IBAction)showTrain:(id)sender; +- (IBAction)showShare:(id)sender; @end diff --git a/clients/ios/Classes/BaseViewController.m b/clients/ios/Classes/BaseViewController.m index 91e872d34c..3cc7417e7b 100644 --- a/clients/ios/Classes/BaseViewController.m +++ b/clients/ios/Classes/BaseViewController.m @@ -218,11 +218,63 @@ - (BOOL)isPortrait { } } +- (BOOL)isFeedShown { + return appDelegate.storiesCollection.activeFeed != nil || appDelegate.storiesCollection.activeFolder != nil; +} + +- (BOOL)isStoryShown { + return !appDelegate.storyPagesViewController.currentPage.view.isHidden && appDelegate.storyPagesViewController.currentPage.noStoryMessage.isHidden; +} + - (BOOL)isCompactWidth { return self.view.window.windowScene.traitCollection.horizontalSizeClass == UIUserInterfaceSizeClassCompact; //return self.compactWidth > 0.0; } +- (BOOL)canPerformAction:(SEL)action withSender:(id)sender { + if (action == @selector(muteSite) || action == @selector(openRenameSite)) { + return !appDelegate.storiesCollection.isEverything; + } else if (action == @selector(openTrainSite) || action == @selector(openNotifications:) || action == @selector(openStatistics:)) { + return !appDelegate.storiesCollection.isRiverOrSocial; + } else if (action == @selector(openRenameSite)) { + return appDelegate.storiesCollection.isSocialView; + } else if (action == @selector(showTrain:) || action == @selector(showShare:)) { + return self.isStoryShown; + } else { + return [super canPerformAction:action withSender:sender]; + } +} + +- (void)validateCommand:(UICommand *)command { + [super validateCommand:command]; + + if (command.action == 
@selector(chooseColumns:)) { + command.state = [command.propertyList isEqualToString:appDelegate.detailViewController.behaviorString]; + } else if (command.action == @selector(chooseFontSize:)) { + NSString *value = [[NSUserDefaults standardUserDefaults] objectForKey:@"feed_list_font_size"]; + command.state = [command.propertyList isEqualToString:value]; + } else if (command.action == @selector(chooseSpacing:)) { + NSString *value = [[NSUserDefaults standardUserDefaults] objectForKey:@"feed_list_spacing"]; + command.state = [command.propertyList isEqualToString:value]; + } else if (command.action == @selector(chooseTheme:)) { + command.state = [command.propertyList isEqualToString:ThemeManager.themeManager.theme]; + } else if (command.action == @selector(toggleStorySaved:)) { + BOOL isRead = [[self.appDelegate.activeStory objectForKey:@"starred"] boolValue]; + if (isRead) { + command.title = @"Unsave This Story"; + } else { + command.title = @"Save THis Story"; + } + } else if (command.action == @selector(toggleStoryUnread:)) { + BOOL isRead = [[self.appDelegate.activeStory objectForKey:@"read_status"] boolValue]; + if (isRead) { + command.title = @"Mark as Unread"; + } else { + command.title = @"Mark as Read"; + } + } +} + - (IBAction)reloadFeeds:(id)sender { [appDelegate reloadFeedsView:NO]; } @@ -299,4 +351,12 @@ - (IBAction)chooseTheme:(id)sender { [ThemeManager themeManager].theme = string; } +- (IBAction)showTrain:(id)sender { + [self.appDelegate openTrainStory:self.appDelegate.storyPagesViewController.fontSettingsButton]; +} + +- (IBAction)showShare:(id)sender { + [self.appDelegate.storyPagesViewController.currentPage openShareDialog]; +} + @end diff --git a/clients/ios/Classes/DetailViewController.swift b/clients/ios/Classes/DetailViewController.swift index 507971bf32..5a1e714e41 100644 --- a/clients/ios/Classes/DetailViewController.swift +++ b/clients/ios/Classes/DetailViewController.swift @@ -168,7 +168,7 @@ class DetailViewController: BaseViewController { /// How the split controller behaves. var behavior: Behavior { - switch UserDefaults.standard.string(forKey: Key.behavior) { + switch behaviorString { case BehaviorValue.tile: return .tile case BehaviorValue.displace: @@ -180,6 +180,11 @@ class DetailViewController: BaseViewController { } } + /// The split controller behavior as a raw string. + @objc var behaviorString: String { + return UserDefaults.standard.string(forKey: Key.behavior) ?? BehaviorValue.auto + } + /// Position of the divider between the views. 
var dividerPosition: CGFloat { get { diff --git a/clients/ios/Classes/FeedDetailObjCViewController.m b/clients/ios/Classes/FeedDetailObjCViewController.m index 840d503f32..b83a8c3e4d 100644 --- a/clients/ios/Classes/FeedDetailObjCViewController.m +++ b/clients/ios/Classes/FeedDetailObjCViewController.m @@ -3018,9 +3018,9 @@ - (void)toggleHiddenStories { } - (IBAction)openNotifications:(id)sender { - NSString *feedId = [self.appDelegate.storiesCollection.activeFeed objectForKey:@"id"]; + NSString *feedIdStr = storiesCollection.activeFeedIdStr; - [appDelegate openNotificationsWithFeed:feedId]; + [appDelegate openNotificationsWithFeed:feedIdStr]; } - (void)openNotificationsWithFeed:(NSString *)feedId { @@ -3028,9 +3028,9 @@ - (void)openNotificationsWithFeed:(NSString *)feedId { } - (IBAction)openStatistics:(id)sender { - NSString *feedId = [self.appDelegate.storiesCollection.activeFeed objectForKey:@"id"]; + NSString *feedIdStr = storiesCollection.activeFeedIdStr; - [appDelegate openStatisticsWithFeed:feedId sender:settingsBarButton]; + [appDelegate openStatisticsWithFeed:feedIdStr sender:settingsBarButton]; } - (void)openStatisticsWithFeed:(NSString *)feedId { diff --git a/clients/ios/Classes/FeedsObjCViewController.h b/clients/ios/Classes/FeedsObjCViewController.h index 32bbe38830..195f7da5f6 100644 --- a/clients/ios/Classes/FeedsObjCViewController.h +++ b/clients/ios/Classes/FeedsObjCViewController.h @@ -57,6 +57,7 @@ UIGestureRecognizerDelegate, UISearchBarDelegate> { @property (nonatomic) IBOutlet UIBarButtonItem * homeButton; @property (nonatomic) IBOutlet UIBarButtonItem * addBarButton; @property (nonatomic) IBOutlet UIBarButtonItem * settingsBarButton; +@property (nonatomic) UIButton *activityButton; @property (nonatomic) IBOutlet UIBarButtonItem * activitiesButton; #if TARGET_OS_MACCATALYST @property (nonatomic) IBOutlet UIBarButtonItem * spacerBarButton; diff --git a/clients/ios/Classes/FeedsObjCViewController.m b/clients/ios/Classes/FeedsObjCViewController.m index e20daec97e..96b179d8b0 100644 --- a/clients/ios/Classes/FeedsObjCViewController.m +++ b/clients/ios/Classes/FeedsObjCViewController.m @@ -712,18 +712,19 @@ - (void)finishLoadingFeedListWithDict:(NSDictionary *)results finished:(BOOL)fin // [settingsBarButton setCustomView:settingsButton]; UIImage *activityImage = [Utilities templateImageNamed:@"dialog-notifications" sized:32]; - NBBarButtonItem *activityButton = [NBBarButtonItem buttonWithType:UIButtonTypeCustom]; - activityButton.accessibilityLabel = @"Activities"; - [activityButton setImage:activityImage forState:UIControlStateNormal]; - activityButton.tintColor = UIColorFromRGB(0x8F918B); - [activityButton setImageEdgeInsets:UIEdgeInsetsMake(4, 0, 4, 0)]; - [activityButton addTarget:self + [self.activityButton removeFromSuperview]; + self.activityButton = [NBBarButtonItem buttonWithType:UIButtonTypeCustom]; + self.activityButton.accessibilityLabel = @"Activities"; + [self.activityButton setImage:activityImage forState:UIControlStateNormal]; + self.activityButton.tintColor = UIColorFromRGB(0x8F918B); + [self.activityButton setImageEdgeInsets:UIEdgeInsetsMake(4, 0, 4, 0)]; + [self.activityButton addTarget:self action:@selector(showInteractionsPopover:) forControlEvents:UIControlEventTouchUpInside]; activitiesButton = [[UIBarButtonItem alloc] - initWithCustomView:activityButton]; + initWithCustomView:self.activityButton]; activitiesButton.width = 32; -// activityButton.backgroundColor = UIColor.redColor; +// self.activityButton.backgroundColor = UIColor.redColor; 
self.navigationItem.rightBarButtonItem = activitiesButton; NSMutableDictionary *sortedFolders = [[NSMutableDictionary alloc] init]; @@ -2881,7 +2882,7 @@ - (void)layoutHeaderCounts:(UIInterfaceOrientation)orientation { [self.userInfoView removeFromSuperview]; self.userInfoView = [[UIView alloc] - initWithFrame:CGRectMake(0, 0, self.view.bounds.size.width, 50)]; + initWithFrame:CGRectMake(0, 0, self.innerView.bounds.size.width, 50)]; self.userInfoView.backgroundColor = UIColorFromLightSepiaMediumDarkRGB(0xE0E0E0, 0xFFF8CA, 0x4F4F4F, 0x292B2C); #else if (!orientation) { @@ -2969,15 +2970,18 @@ - (void)layoutHeaderCounts:(UIInterfaceOrientation)orientation { positiveCount.backgroundColor = [UIColor clearColor]; [self.userInfoView addSubview:positiveCount]; - [self.userInfoView sizeToFit]; - // self.userInfoView.backgroundColor = UIColor.blueColor; #if TARGET_OS_MACCATALYST + self.activityButton.frame = CGRectMake(self.innerView.bounds.size.width - 36, 10, 32, 32); + + [self.userInfoView addSubview:self.activityButton]; + [self.innerView addSubview:self.userInfoView]; self.feedTitlesTopConstraint.constant = 50; #else + [self.userInfoView sizeToFit]; self.navigationItem.titleView = self.userInfoView; #endif } diff --git a/clients/ios/Classes/NewsBlurAppDelegate.m b/clients/ios/Classes/NewsBlurAppDelegate.m index 05c5966518..daac112938 100644 --- a/clients/ios/Classes/NewsBlurAppDelegate.m +++ b/clients/ios/Classes/NewsBlurAppDelegate.m @@ -3356,6 +3356,24 @@ - (void)showPopoverWithViewController:(UIViewController *)viewController content popoverPresentationController.backgroundColor = UIColorFromRGB(NEWSBLUR_WHITE_COLOR); popoverPresentationController.permittedArrowDirections = permittedArrowDirections; +#if TARGET_OS_MACCATALYST + if (barButtonItem && barButtonItem == appDelegate.feedDetailViewController.settingsBarButton) { + UINavigationController *feedDetailNavController = appDelegate.feedDetailViewController.navigationController; + barButtonItem = nil; + sourceView = feedDetailNavController.view; + if (appDelegate.splitViewController.isFeedListHidden) { + sourceRect = CGRectMake(224, 0, 20, 20); + } else { + sourceRect = CGRectMake(152, 0, 20, 20); + } + } else if (barButtonItem && barButtonItem == appDelegate.storyPagesViewController.fontSettingsButton) { + UINavigationController *storiesNavController = appDelegate.storyPagesViewController.navigationController; + barButtonItem = nil; + sourceView = storiesNavController.view; + sourceRect = CGRectMake(storiesNavController.view.frame.size.width - 59, 0, 20, 20); + } +#endif + if (barButtonItem) { popoverPresentationController.barButtonItem = barButtonItem; } else { diff --git a/clients/ios/Classes/StoriesCollection.h b/clients/ios/Classes/StoriesCollection.h index c8e3389994..910274d524 100644 --- a/clients/ios/Classes/StoriesCollection.h +++ b/clients/ios/Classes/StoriesCollection.h @@ -59,10 +59,12 @@ @property (nonatomic, readwrite) BOOL transferredFromDashboard; @property (nonatomic, readwrite) BOOL showHiddenStories; @property (nonatomic, readwrite) BOOL inSearch; +@property (nonatomic, readonly) BOOL isEverything; @property (nonatomic, readonly) BOOL isRiverOrSocial; @property (nonatomic) NSString *searchQuery; @property (nonatomic) NSString *savedSearchQuery; +@property (nonatomic, readonly) NSString *activeFeedIdStr; @property (nonatomic, readonly) NSString *activeOrder; @property (nonatomic, readonly) NSString *activeReadFilter; @property (nonatomic, readonly) NSString *activeStoryTitlesPosition; diff --git 
a/clients/ios/Classes/StoriesCollection.m b/clients/ios/Classes/StoriesCollection.m index aeac005ecb..1cdf94858f 100644 --- a/clients/ios/Classes/StoriesCollection.m +++ b/clients/ios/Classes/StoriesCollection.m @@ -97,6 +97,10 @@ - (void)transferStoriesFromCollection:(StoriesCollection *)fromCollection { self.savedSearchQuery = fromCollection.savedSearchQuery; } +- (BOOL)isEverything { + return [activeFolder isEqualToString:@"everything"]; +} + - (BOOL)isRiverOrSocial { return self.isRiverView || self.isSavedView || self.isReadView || self.isWidgetView || self.isSocialView || self.isSocialRiverView; } @@ -232,6 +236,10 @@ - (NSInteger)indexFromLocation:(NSInteger)location { return [[activeFeedStoryLocations objectAtIndex:location] intValue]; } +- (NSString *)activeFeedIdStr { + return [NSString stringWithFormat:@"%@", [activeFeed objectForKey:@"id"]]; +} + - (NSString *)activeOrder { NSUserDefaults *userPreferences = [NSUserDefaults standardUserDefaults]; NSString *orderPrefDefault = [userPreferences stringForKey:@"default_order"]; diff --git a/clients/ios/Classes/StoryDetailObjCViewController.m b/clients/ios/Classes/StoryDetailObjCViewController.m index 96c31750c7..42a91b1e72 100644 --- a/clients/ios/Classes/StoryDetailObjCViewController.m +++ b/clients/ios/Classes/StoryDetailObjCViewController.m @@ -1405,9 +1405,11 @@ - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(N int bottomPosition = webpageHeight - topPosition - viewportHeight; BOOL singlePage = webpageHeight - 200 <= viewportHeight; BOOL atBottom = bottomPosition < 150; - BOOL pullingDown = topPosition < 0; BOOL atTop = topPosition < 50; +#if !TARGET_OS_MACCATALYST + BOOL pullingDown = topPosition < 0; BOOL nearTop = topPosition < 100; +#endif if (!hasScrolled && topPosition != 0) { hasScrolled = YES; diff --git a/clients/ios/Classes/StoryPagesObjCViewController.h b/clients/ios/Classes/StoryPagesObjCViewController.h index 81e2b1ff39..62281533cd 100644 --- a/clients/ios/Classes/StoryPagesObjCViewController.h +++ b/clients/ios/Classes/StoryPagesObjCViewController.h @@ -156,8 +156,6 @@ - (IBAction)toggleStorySaved:(id)sender; - (IBAction)toggleStoryUnread:(id)sender; -- (IBAction)showTrain:(id)sender; -- (IBAction)showShare:(id)sender; - (void)finishMarkAsSaved:(NSDictionary *)params; - (BOOL)failedMarkAsSaved:(NSDictionary *)params; diff --git a/clients/ios/Classes/StoryPagesObjCViewController.m b/clients/ios/Classes/StoryPagesObjCViewController.m index e7035e070f..d6bb340234 100644 --- a/clients/ios/Classes/StoryPagesObjCViewController.m +++ b/clients/ios/Classes/StoryPagesObjCViewController.m @@ -1086,9 +1086,12 @@ - (void)changePage:(NSInteger)pageIndex animated:(BOOL)animated { } self.scrollingToPage = pageIndex; - [self.currentPage hideNoStoryMessage]; - [self.nextPage hideNoStoryMessage]; - [self.previousPage hideNoStoryMessage]; + + if (pageIndex >= 0) { + [self.currentPage hideNoStoryMessage]; + [self.nextPage hideNoStoryMessage]; + [self.previousPage hideNoStoryMessage]; + } // Check if already on the selected page if (self.isHorizontal ? offset.x == frame.origin.x : offset.y == frame.origin.y) { @@ -1488,14 +1491,6 @@ - (IBAction)toggleStoryUnread:(id)sender { [appDelegate.feedDetailViewController reload]; // XXX only if successful? 
} -- (IBAction)showTrain:(id)sender { - [self.appDelegate openTrainStory:self.appDelegate.storyPagesViewController.fontSettingsButton]; -} - -- (IBAction)showShare:(id)sender { - [self.appDelegate.storyPagesViewController.currentPage openShareDialog]; -} - - (BOOL)canPerformAction:(SEL)action withSender:(id)sender { if (action == @selector(toggleTextView:) || action == @selector(scrollPageDown:) || diff --git a/clients/ios/Classes/StoryPagesViewController.swift b/clients/ios/Classes/StoryPagesViewController.swift index c8d2aea89e..d59a6c76f8 100644 --- a/clients/ios/Classes/StoryPagesViewController.swift +++ b/clients/ios/Classes/StoryPagesViewController.swift @@ -25,7 +25,7 @@ class StoryPagesViewController: StoryPagesObjCViewController { @objc func validateToolbarItem(_ item: NSToolbarItem) -> Bool { if [.storyPagesSettings, .storyPagesBrowser].contains(item.itemIdentifier) { - return !self.currentPage.view.isHidden && self.currentPage.noStoryMessage.isHidden + return self.isStoryShown } else { return true } diff --git a/clients/ios/NewsBlur.xcodeproj/project.pbxproj b/clients/ios/NewsBlur.xcodeproj/project.pbxproj index b397e7a285..3b57a7fbb8 100755 --- a/clients/ios/NewsBlur.xcodeproj/project.pbxproj +++ b/clients/ios/NewsBlur.xcodeproj/project.pbxproj @@ -3853,7 +3853,7 @@ attributes = { BuildIndependentTargetsInParallel = YES; LastSwiftUpdateCheck = 1120; - LastUpgradeCheck = 1510; + LastUpgradeCheck = 1520; ORGANIZATIONNAME = NewsBlur; TargetAttributes = { 173CB30C26BCE94700BA872A = { diff --git a/clients/ios/NewsBlur.xcodeproj/xcshareddata/xcschemes/Alpha Widget Extension.xcscheme b/clients/ios/NewsBlur.xcodeproj/xcshareddata/xcschemes/Alpha Widget Extension.xcscheme index 786a1accc6..370ead6ad7 100644 --- a/clients/ios/NewsBlur.xcodeproj/xcshareddata/xcschemes/Alpha Widget Extension.xcscheme +++ b/clients/ios/NewsBlur.xcodeproj/xcshareddata/xcschemes/Alpha Widget Extension.xcscheme @@ -1,6 +1,6 @@ - + @@ -421,9 +421,8 @@ - - + @@ -446,6 +445,9 @@ + + + From ed9279e2a33c09d94003434008ecf8f174a53e06 Mon Sep 17 00:00:00 2001 From: David Sinclair Date: Fri, 9 Feb 2024 11:47:03 -0500 Subject: [PATCH 16/69] #1247 (Mac Catalyst edition) - Added submenus to the View menu for Layout, Story Preview, Image Preview, Grid Columns, and Grid Height. - Implemented the remaining menu items. - Menus now update based on folder vs site. - Menus now disable when not appropriate. - Added some handy new stories collection properties. 
--- clients/ios/Classes/BaseViewController.h | 19 ++ clients/ios/Classes/BaseViewController.m | 191 ++++++++++++++++-- .../Classes/FeedDetailObjCViewController.h | 4 + .../Classes/FeedDetailObjCViewController.m | 13 +- .../Classes/FeedDetailViewController.swift | 4 - clients/ios/Classes/StoriesCollection.h | 3 + clients/ios/Classes/StoriesCollection.m | 12 ++ .../Classes/StoryDetailObjCViewController.m | 8 +- .../ios/Resources/MainInterface.storyboard | 165 ++++++++++++++- 9 files changed, 383 insertions(+), 36 deletions(-) diff --git a/clients/ios/Classes/BaseViewController.h b/clients/ios/Classes/BaseViewController.h index bfdb7e1062..6ea30842dc 100644 --- a/clients/ios/Classes/BaseViewController.h +++ b/clients/ios/Classes/BaseViewController.h @@ -14,6 +14,7 @@ @property (nonatomic, readonly) BOOL isVision; @property (nonatomic, readonly) BOOL isPortrait; @property (nonatomic, readonly) BOOL isCompactWidth; +@property (nonatomic, readonly) BOOL isGrid; @property (nonatomic, readonly) BOOL isFeedShown; @property (nonatomic, readonly) BOOL isStoryShown; @@ -47,10 +48,28 @@ - (IBAction)showPremium:(id)sender; - (IBAction)showSupportForum:(id)sender; - (IBAction)showLogout:(id)sender; + - (IBAction)chooseColumns:(id)sender; +- (IBAction)chooseLayout:(id)sender; +- (IBAction)chooseTitle:(id)sender; +- (IBAction)choosePreview:(id)sender; +- (IBAction)chooseGridColumns:(id)sender; +- (IBAction)chooseGridHeight:(id)sender; - (IBAction)chooseFontSize:(id)sender; - (IBAction)chooseSpacing:(id)sender; - (IBAction)chooseTheme:(id)sender; + +- (IBAction)moveSite:(id)sender; +- (IBAction)openRenameSite:(id)sender; +- (IBAction)muteSite:(id)sender; +- (IBAction)deleteSite:(id)sender; +- (IBAction)openTrainSite:(id)sender; +- (IBAction)openNotifications:(id)sender; +- (IBAction)openStatistics:(id)sender; +- (IBAction)instaFetchFeed:(id)sender; +- (IBAction)doMarkAllRead:(id)sender; + +- (IBAction)showSendTo:(id)sender; - (IBAction)showTrain:(id)sender; - (IBAction)showShare:(id)sender; diff --git a/clients/ios/Classes/BaseViewController.m b/clients/ios/Classes/BaseViewController.m index 3cc7417e7b..75f49a40e3 100644 --- a/clients/ios/Classes/BaseViewController.m +++ b/clients/ios/Classes/BaseViewController.m @@ -218,6 +218,15 @@ - (BOOL)isPortrait { } } +- (BOOL)isCompactWidth { + return self.view.window.windowScene.traitCollection.horizontalSizeClass == UIUserInterfaceSizeClassCompact; + //return self.compactWidth > 0.0; +} + +- (BOOL)isGrid { + return self.appDelegate.detailViewController.storyTitlesInGrid; +} + - (BOOL)isFeedShown { return appDelegate.storiesCollection.activeFeed != nil || appDelegate.storiesCollection.activeFolder != nil; } @@ -226,19 +235,30 @@ - (BOOL)isStoryShown { return !appDelegate.storyPagesViewController.currentPage.view.isHidden && appDelegate.storyPagesViewController.currentPage.noStoryMessage.isHidden; } -- (BOOL)isCompactWidth { - return self.view.window.windowScene.traitCollection.horizontalSizeClass == UIUserInterfaceSizeClassCompact; - //return self.compactWidth > 0.0; -} - - (BOOL)canPerformAction:(SEL)action withSender:(id)sender { - if (action == @selector(muteSite) || action == @selector(openRenameSite)) { - return !appDelegate.storiesCollection.isEverything; - } else if (action == @selector(openTrainSite) || action == @selector(openNotifications:) || action == @selector(openStatistics:)) { - return !appDelegate.storiesCollection.isRiverOrSocial; - } else if (action == @selector(openRenameSite)) { - return appDelegate.storiesCollection.isSocialView; - } else if 
(action == @selector(showTrain:) || action == @selector(showShare:)) { + if (action == @selector(chooseLayout:)) { + return self.isFeedShown; + } else if (action == @selector(chooseTitle:) || action == @selector(choosePreview:)) { + return self.isFeedShown && !self.isGrid; + } else if (action == @selector(chooseGridColumns:) || action == @selector(chooseGridHeight:)) { + return self.isFeedShown && self.isGrid; + } else if (action == @selector(openTrainSite) || + action == @selector(openTrainSite:) || + action == @selector(openNotifications:) || + action == @selector(openStatistics:) || + action == @selector(moveSite:) || + action == @selector(openRenameSite:) || + action == @selector(deleteSite:)) { + return self.isFeedShown && appDelegate.storiesCollection.isCustomFolderOrFeed; + } else if (action == @selector(muteSite) || + action == @selector(muteSite:)) { + return self.isFeedShown && !appDelegate.storiesCollection.isRiverView; + } else if (action == @selector(instaFetchFeed:) || + action == @selector(doMarkAllRead:)) { + return self.isFeedShown; + } else if (action == @selector(showSendTo:) || + action == @selector(showTrain:) || + action == @selector(showShare:)) { return self.isStoryShown; } else { return [super canPerformAction:action withSender:sender]; @@ -250,6 +270,27 @@ - (void)validateCommand:(UICommand *)command { if (command.action == @selector(chooseColumns:)) { command.state = [command.propertyList isEqualToString:appDelegate.detailViewController.behaviorString]; + } else if (command.action == @selector(chooseLayout:)) { + NSString *value = self.appDelegate.storiesCollection.activeStoryTitlesPosition; + command.state = [command.propertyList isEqualToString:value]; + } else if (command.action == @selector(chooseTitle:)) { + NSString *value = [[NSUserDefaults standardUserDefaults] objectForKey:@"story_list_preview_text_size"]; + command.state = [command.propertyList isEqualToString:value]; + } else if (command.action == @selector(choosePreview:)) { + NSString *value = [[NSUserDefaults standardUserDefaults] objectForKey:@"story_list_preview_images_size"]; + command.state = [command.propertyList isEqualToString:value]; + } else if (command.action == @selector(chooseGridColumns:)) { + NSString *value = [[NSUserDefaults standardUserDefaults] objectForKey:@"grid_columns"]; + if (value == nil) { + value = @"auto"; + } + command.state = [command.propertyList isEqualToString:value]; + } else if (command.action == @selector(chooseGridHeight:)) { + NSString *value = [[NSUserDefaults standardUserDefaults] objectForKey:@"grid_height"]; + if (value == nil) { + value = @"medium"; + } + command.state = [command.propertyList isEqualToString:value]; } else if (command.action == @selector(chooseFontSize:)) { NSString *value = [[NSUserDefaults standardUserDefaults] objectForKey:@"feed_list_font_size"]; command.state = [command.propertyList isEqualToString:value]; @@ -258,12 +299,24 @@ - (void)validateCommand:(UICommand *)command { command.state = [command.propertyList isEqualToString:value]; } else if (command.action == @selector(chooseTheme:)) { command.state = [command.propertyList isEqualToString:ThemeManager.themeManager.theme]; + } else if (command.action == @selector(openRenameSite:)) { + if (appDelegate.storiesCollection.isRiverOrSocial) { + command.title = @"Rename Folder…"; + } else { + command.title = @"Rename Site…"; + } + } else if (command.action == @selector(deleteSite:)) { + if (appDelegate.storiesCollection.isRiverOrSocial) { + command.title = @"Delete Folder…"; + } else { + 
command.title = @"Delete Site…"; + } } else if (command.action == @selector(toggleStorySaved:)) { BOOL isRead = [[self.appDelegate.activeStory objectForKey:@"starred"] boolValue]; if (isRead) { command.title = @"Unsave This Story"; } else { - command.title = @"Save THis Story"; + command.title = @"Save This Story"; } } else if (command.action == @selector(toggleStoryUnread:)) { BOOL isRead = [[self.appDelegate.activeStory objectForKey:@"read_status"] boolValue]; @@ -275,6 +328,9 @@ - (void)validateCommand:(UICommand *)command { } } +#pragma mark - +#pragma mark File menu + - (IBAction)reloadFeeds:(id)sender { [appDelegate reloadFeedsView:NO]; } @@ -312,6 +368,9 @@ - (IBAction)showLogout:(id)sender { [self.appDelegate confirmLogout]; } +#pragma mark - +#pragma mark View menu + - (IBAction)chooseColumns:(id)sender { UICommand *command = sender; NSString *string = command.propertyList; @@ -325,6 +384,52 @@ - (IBAction)chooseColumns:(id)sender { [self.appDelegate.detailViewController updateLayoutWithReload:NO fetchFeeds:YES]; } +- (IBAction)chooseLayout:(id)sender { + UICommand *command = sender; + NSString *string = command.propertyList; + NSString *key = self.appDelegate.storiesCollection.storyTitlesPositionKey; + + [[NSUserDefaults standardUserDefaults] setObject:string forKey:key]; + + [self.appDelegate.detailViewController updateLayoutWithReload:YES fetchFeeds:YES]; +} + +- (IBAction)chooseTitle:(id)sender { + UICommand *command = sender; + NSString *string = command.propertyList; + + [[NSUserDefaults standardUserDefaults] setObject:string forKey:@"story_list_preview_text_size"]; + + [self.appDelegate resizePreviewSize]; +} + +- (IBAction)choosePreview:(id)sender { + UICommand *command = sender; + NSString *string = command.propertyList; + + [[NSUserDefaults standardUserDefaults] setObject:string forKey:@"story_list_preview_images_size"]; + + [self.appDelegate resizePreviewSize]; +} + +- (IBAction)chooseGridColumns:(id)sender { + UICommand *command = sender; + NSString *string = command.propertyList; + + [[NSUserDefaults standardUserDefaults] setObject:string forKey:@"grid_columns"]; + + [self.appDelegate.detailViewController updateLayoutWithReload:YES fetchFeeds:YES]; +} + +- (IBAction)chooseGridHeight:(id)sender { + UICommand *command = sender; + NSString *string = command.propertyList; + + [[NSUserDefaults standardUserDefaults] setObject:string forKey:@"grid_height"]; + + [self.appDelegate.detailViewController updateLayoutWithReload:YES fetchFeeds:YES]; +} + - (IBAction)chooseFontSize:(id)sender { UICommand *command = sender; NSString *string = command.propertyList; @@ -351,6 +456,66 @@ - (IBAction)chooseTheme:(id)sender { [ThemeManager themeManager].theme = string; } +#pragma mark - +#pragma mark Site menu + +- (IBAction)moveSite:(id)sender { + [self.appDelegate.feedDetailViewController openMoveView:self.appDelegate.navigationController]; +} + +- (IBAction)openRenameSite:(id)sender { + [self.appDelegate.feedDetailViewController openRenameSite]; +} + +- (IBAction)muteSite:(id)sender { + UIAlertController *alertController = [UIAlertController alertControllerWithTitle:[NSString stringWithFormat:@"Are you sure you wish to mute %@?", self.appDelegate.storiesCollection.activeTitle] message:nil preferredStyle:UIAlertControllerStyleAlert]; + [alertController addAction:[UIAlertAction actionWithTitle: @"Mute Site" style:UIAlertActionStyleDestructive handler:^(UIAlertAction * action) { + [alertController dismissViewControllerAnimated:YES completion:nil]; + 
[self.appDelegate.feedDetailViewController muteSite]; + }]]; + [alertController addAction:[UIAlertAction actionWithTitle:@"Cancel" + style:UIAlertActionStyleCancel handler:nil]]; + [self presentViewController:alertController animated:YES completion:nil]; +} + +- (IBAction)deleteSite:(id)sender { + UIAlertController *alertController = [UIAlertController alertControllerWithTitle:[NSString stringWithFormat:@"Are you sure you wish to delete %@?", self.appDelegate.storiesCollection.activeTitle] message:nil preferredStyle:UIAlertControllerStyleAlert]; + [alertController addAction:[UIAlertAction actionWithTitle: @"Delete Site" style:UIAlertActionStyleDestructive handler:^(UIAlertAction * action) { + [alertController dismissViewControllerAnimated:YES completion:nil]; + [self.appDelegate.feedDetailViewController deleteSite]; + }]]; + [alertController addAction:[UIAlertAction actionWithTitle:@"Cancel" + style:UIAlertActionStyleCancel handler:nil]]; + [self presentViewController:alertController animated:YES completion:nil]; +} + +- (IBAction)openTrainSite:(id)sender { + [self.appDelegate.feedDetailViewController openTrainSite]; +} + +- (IBAction)openNotifications:(id)sender { + [self.appDelegate.feedDetailViewController openNotifications:sender]; +} + +- (IBAction)openStatistics:(id)sender { + [self.appDelegate.feedDetailViewController openStatistics:sender]; +} + +- (IBAction)instaFetchFeed:(id)sender { + [self.appDelegate.feedDetailViewController instafetchFeed]; +} + +- (IBAction)doMarkAllRead:(id)sender { + [self.appDelegate.feedDetailViewController doMarkAllRead:sender]; +} + +#pragma mark - +#pragma mark Story menu + +- (IBAction)showSendTo:(id)sender { + [appDelegate showSendTo:self sender:sender]; +} + - (IBAction)showTrain:(id)sender { [self.appDelegate openTrainStory:self.appDelegate.storyPagesViewController.fontSettingsButton]; } diff --git a/clients/ios/Classes/FeedDetailObjCViewController.h b/clients/ios/Classes/FeedDetailObjCViewController.h index ce7037dbf0..836efca9ab 100644 --- a/clients/ios/Classes/FeedDetailObjCViewController.h +++ b/clients/ios/Classes/FeedDetailObjCViewController.h @@ -110,6 +110,7 @@ - (void)loadStoryAtRow:(NSInteger)row; - (void)redrawUnreadStory; - (IBAction)doOpenMarkReadMenu:(id)sender; +- (IBAction)doMarkAllRead:(id)sender; - (IBAction)doOpenSettingsMenu:(id)sender; - (void)deleteSite; - (void)deleteFolder; @@ -132,4 +133,7 @@ - (void)failedMarkAsUnsaved:(NSDictionary *)params; - (void)failedMarkAsUnread:(NSDictionary *)params; +- (void)confirmDeleteSite:(UINavigationController *)menuNavigationController; +- (void)openMoveView:(UINavigationController *)menuNavigationController; + @end diff --git a/clients/ios/Classes/FeedDetailObjCViewController.m b/clients/ios/Classes/FeedDetailObjCViewController.m index b83a8c3e4d..f96d0da9bd 100644 --- a/clients/ios/Classes/FeedDetailObjCViewController.m +++ b/clients/ios/Classes/FeedDetailObjCViewController.m @@ -764,7 +764,7 @@ - (void)reloadStories { } - (void)beginOfflineTimer { - if ([self.storiesCollection.activeFolder isEqualToString:@"infrequent"]) { + if (self.storiesCollection.isInfrequent) { return; } @@ -2442,11 +2442,6 @@ - (BOOL)isRiver { appDelegate.storiesCollection.isReadView; } -- (BOOL)isInfrequent { - return appDelegate.storiesCollection.isRiverView && - [appDelegate.storiesCollection.activeFolder isEqualToString:@"infrequent"]; -} - - (IBAction)doShowFeeds:(id)sender { [self.appDelegate showColumn:UISplitViewControllerColumnPrimary debugInfo:@"showFeeds"]; } @@ -2461,8 +2456,8 @@ - 
(IBAction)doOpenSettingsMenu:(id)sender { MenuViewController *viewController = [MenuViewController new]; __weak MenuViewController *weakViewController = viewController; - BOOL everything = [appDelegate.storiesCollection.activeFolder isEqualToString:@"everything"]; - BOOL infrequent = [self isInfrequent]; + BOOL everything = appDelegate.storiesCollection.isEverything; + BOOL infrequent = appDelegate.storiesCollection.isInfrequent; BOOL river = [self isRiver]; BOOL read = appDelegate.storiesCollection.isReadView; BOOL widget = appDelegate.storiesCollection.isWidgetView; @@ -3233,7 +3228,7 @@ - (IBAction)deleteSite:(id)sender { - (BOOL)canPullToRefresh { BOOL river = appDelegate.storiesCollection.isRiverView; - BOOL infrequent = [self isInfrequent]; + BOOL infrequent = appDelegate.storiesCollection.isInfrequent; BOOL read = appDelegate.storiesCollection.isReadView; BOOL widget = appDelegate.storiesCollection.isWidgetView; BOOL saved = appDelegate.storiesCollection.isSavedView; diff --git a/clients/ios/Classes/FeedDetailViewController.swift b/clients/ios/Classes/FeedDetailViewController.swift index 6c0904b7a6..d5870b8569 100644 --- a/clients/ios/Classes/FeedDetailViewController.swift +++ b/clients/ios/Classes/FeedDetailViewController.swift @@ -29,10 +29,6 @@ class FeedDetailViewController: FeedDetailObjCViewController { case loading } - var isGrid: Bool { - return appDelegate.detailViewController.layout == .grid - } - var wasGrid: Bool { return appDelegate.detailViewController.wasGrid } diff --git a/clients/ios/Classes/StoriesCollection.h b/clients/ios/Classes/StoriesCollection.h index 910274d524..c56cb75d43 100644 --- a/clients/ios/Classes/StoriesCollection.h +++ b/clients/ios/Classes/StoriesCollection.h @@ -60,7 +60,10 @@ @property (nonatomic, readwrite) BOOL showHiddenStories; @property (nonatomic, readwrite) BOOL inSearch; @property (nonatomic, readonly) BOOL isEverything; +@property (nonatomic, readonly) BOOL isInfrequent; @property (nonatomic, readonly) BOOL isRiverOrSocial; +@property (nonatomic, readonly) BOOL isCustomFolder; +@property (nonatomic, readonly) BOOL isCustomFolderOrFeed; @property (nonatomic) NSString *searchQuery; @property (nonatomic) NSString *savedSearchQuery; diff --git a/clients/ios/Classes/StoriesCollection.m b/clients/ios/Classes/StoriesCollection.m index 1cdf94858f..9b5b11ab35 100644 --- a/clients/ios/Classes/StoriesCollection.m +++ b/clients/ios/Classes/StoriesCollection.m @@ -101,10 +101,22 @@ - (BOOL)isEverything { return [activeFolder isEqualToString:@"everything"]; } +- (BOOL)isInfrequent { + return [activeFolder isEqualToString:@"infrequent"]; +} + - (BOOL)isRiverOrSocial { return self.isRiverView || self.isSavedView || self.isReadView || self.isWidgetView || self.isSocialView || self.isSocialRiverView; } +- (BOOL)isCustomFolder { + return self.isRiverView && !self.isEverything && !self.isInfrequent && !self.isSavedView && !self.isReadView && !self.isSocialView && !self.isWidgetView; +} + +- (BOOL)isCustomFolderOrFeed { + return !self.isRiverView || self.isCustomFolder; +} + #pragma mark - Story Traversal - (BOOL)isStoryUnread:(NSDictionary *)story { diff --git a/clients/ios/Classes/StoryDetailObjCViewController.m b/clients/ios/Classes/StoryDetailObjCViewController.m index 42a91b1e72..8490017ac8 100644 --- a/clients/ios/Classes/StoryDetailObjCViewController.m +++ b/clients/ios/Classes/StoryDetailObjCViewController.m @@ -2251,9 +2251,11 @@ - (void)fetchImage:(NSURL *)url copy:(BOOL)copy save:(BOOL)save { } - (BOOL)canPerformAction:(SEL)action 
withSender:(id)sender { - if ([self respondsToSelector:action]) - return self.noStoryMessage.hidden; - return [super canPerformAction:action withSender:sender]; + if ([self respondsToSelector:action]) { + return [super canPerformAction:action withSender:sender] && self.noStoryMessage.hidden; + } else { + return [super canPerformAction:action withSender:sender]; + } } # pragma mark - diff --git a/clients/ios/Resources/MainInterface.storyboard b/clients/ios/Resources/MainInterface.storyboard index f60eec0638..07a1c2e1df 100644 --- a/clients/ios/Resources/MainInterface.storyboard +++ b/clients/ios/Resources/MainInterface.storyboard @@ -372,6 +372,151 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -453,15 +598,14 @@ - - + - + - + @@ -469,6 +613,9 @@ + + + @@ -479,7 +626,7 @@ - + @@ -498,7 +645,7 @@ - + @@ -528,7 +675,11 @@ - + + + + + From 94c4f766440041a58cd837ed9f8688eaa207f306 Mon Sep 17 00:00:00 2001 From: David Sinclair Date: Fri, 9 Feb 2024 14:01:32 -0500 Subject: [PATCH 17/69] #1247 (Mac Catalyst edition) - Fixed multiple selection in Mute Sites, Organize Sites, and Widget Sites not working properly, due to Catalyst issue. - Fixed the mark read and settings toolbar buttons being disabled in Grid view before selecting a story. --- clients/ios/Classes/BaseViewController.h | 2 ++ clients/ios/Classes/BaseViewController.m | 9 ++++++ .../ios/Classes/FeedChooserViewController.m | 30 +++++++++++++++++++ clients/ios/Classes/ToolbarDelegate.swift | 4 +-- 4 files changed, 43 insertions(+), 2 deletions(-) diff --git a/clients/ios/Classes/BaseViewController.h b/clients/ios/Classes/BaseViewController.h index 6ea30842dc..f27fb810bc 100644 --- a/clients/ios/Classes/BaseViewController.h +++ b/clients/ios/Classes/BaseViewController.h @@ -68,6 +68,8 @@ - (IBAction)openStatistics:(id)sender; - (IBAction)instaFetchFeed:(id)sender; - (IBAction)doMarkAllRead:(id)sender; +- (IBAction)openMarkReadMenu:(id)sender; +- (IBAction)openSettingsMenu:(id)sender; - (IBAction)showSendTo:(id)sender; - (IBAction)showTrain:(id)sender; diff --git a/clients/ios/Classes/BaseViewController.m b/clients/ios/Classes/BaseViewController.m index 75f49a40e3..9438a997fb 100644 --- a/clients/ios/Classes/BaseViewController.m +++ b/clients/ios/Classes/BaseViewController.m @@ -509,6 +509,15 @@ - (IBAction)doMarkAllRead:(id)sender { [self.appDelegate.feedDetailViewController doMarkAllRead:sender]; } +// These two are needed for the toolbar in Grid view. 
+- (IBAction)openMarkReadMenu:(id)sender { + [self.appDelegate.feedDetailViewController doOpenMarkReadMenu:sender]; +} + +- (IBAction)openSettingsMenu:(id)sender { + [self.appDelegate.feedDetailViewController doOpenSettingsMenu:sender]; +} + #pragma mark - #pragma mark Story menu diff --git a/clients/ios/Classes/FeedChooserViewController.m b/clients/ios/Classes/FeedChooserViewController.m index 8949874741..76bcdaf177 100644 --- a/clients/ios/Classes/FeedChooserViewController.m +++ b/clients/ios/Classes/FeedChooserViewController.m @@ -827,6 +827,36 @@ - (NSInteger)tableView:(UITableView *)theTableView sectionForSectionIndexTitle:( return indexIndex; } +#if TARGET_OS_MACCATALYST +- (NSIndexPath *)tableView:(UITableView *)tableView willSelectRowAtIndexPath:(NSIndexPath *)indexPath { + NSArray *selectedRows = [tableView indexPathsForSelectedRows]; + if ([selectedRows containsObject:indexPath]) { + [tableView deselectRowAtIndexPath:indexPath animated:false]; + return nil; + } + + return indexPath; +} + +- (NSIndexPath *)tableView:(UITableView *)tableView willDeselectRowAtIndexPath:(NSIndexPath *)indexPath { + NSArray *selectedRows = [tableView indexPathsForSelectedRows]; + if ([selectedRows containsObject:indexPath]) { + return nil; + } + + return indexPath; +} + +- (BOOL)tableView:(UITableView *)tableView shouldHighlightRowAtIndexPath:(NSIndexPath *)indexPath { + NSArray *selectedRows = [tableView indexPathsForSelectedRows]; + for (NSIndexPath *index in selectedRows) { + [[tableView cellForRowAtIndexPath:index] setHighlighted:YES]; + } + + return YES; +} +#endif + - (void)tableView:(UITableView *)tableView didSelectRowAtIndexPath:(NSIndexPath *)indexPath { if (self.operation == FeedChooserOperationWidgetSites) { [self deselectRowsOutsideSection:indexPath.section]; diff --git a/clients/ios/Classes/ToolbarDelegate.swift b/clients/ios/Classes/ToolbarDelegate.swift index 5bafe70bd5..b720abcba1 100644 --- a/clients/ios/Classes/ToolbarDelegate.swift +++ b/clients/ios/Classes/ToolbarDelegate.swift @@ -54,13 +54,13 @@ extension ToolbarDelegate: NSToolbarDelegate { return makeToolbarItem(itemIdentifier, image: Utilities.imageNamed("mark-read", sized: 24), label: "Mark as Read", - action: #selector(FeedDetailViewController.doOpenMarkReadMenu(_:))) + action: #selector(BaseViewController.openMarkReadMenu(_:))) case .feedDetailSettings: return makeToolbarItem(itemIdentifier, image: Utilities.imageNamed("settings", sized: 24), label: "Site Settings", - action: #selector(FeedDetailViewController.doOpenSettingsMenu(_:))) + action: #selector(BaseViewController.openSettingsMenu(_:))) case .storyPagesSettings: return makeToolbarItem(itemIdentifier, From 124b5babd840f7c26e949dc1e91ca59cb1047554 Mon Sep 17 00:00:00 2001 From: David Sinclair Date: Fri, 9 Feb 2024 16:55:28 -0500 Subject: [PATCH 18/69] #1247 (Mac Catalyst edition) - Improved scrolling performance of Grid view; now avoids reloading while actively scrolling (might hit bottom of loaded cards more often, though). - Added more diagnostics to check performance (also did Instruments profiling). 
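The deferral is a scroll-aware debounce: record a timestamp on every scroll callback, and when a reload request arrives inside the cooldown window, re-queue it instead of rebuilding the data source immediately. A minimal sketch of the pattern, assuming a 0.5-second idle window and a short re-check delay; the class and method names here are illustrative, not the app's actual API:

    import Foundation

    /// Debounces expensive reloads while the user is actively scrolling.
    final class ScrollAwareReloader {
        private var lastScrollDate = Date.distantPast
        private var pendingWork: DispatchWorkItem?

        /// Call from scrollViewDidScroll(_:) or an equivalent callback.
        func noteScrolled() {
            lastScrollDate = Date()
        }

        /// Runs `reload` once scrolling has been idle for half a second.
        func requestReload(_ reload: @escaping () -> Void) {
            pendingWork?.cancel()
            let work = DispatchWorkItem { [weak self] in
                guard let self else { return }
                if -self.lastScrollDate.timeIntervalSinceNow < 0.5 {
                    // Still scrolling; check again shortly.
                    self.requestReload(reload)
                } else {
                    reload()
                }
            }
            pendingWork = work
            DispatchQueue.main.asyncAfter(deadline: .now() + 0.1, execute: work)
        }
    }

The trade-off in the first note follows directly from this: while the window is open, no new cards are appended, so a fast scroll can reach the bottom of the already-loaded cards before the next page arrives.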
--- .../Classes/FeedDetailViewController.swift | 24 +++++++++++++++++++ clients/ios/Classes/Story.swift | 3 ++- 2 files changed, 26 insertions(+), 1 deletion(-) diff --git a/clients/ios/Classes/FeedDetailViewController.swift b/clients/ios/Classes/FeedDetailViewController.swift index d5870b8569..1c135239dc 100644 --- a/clients/ios/Classes/FeedDetailViewController.swift +++ b/clients/ios/Classes/FeedDetailViewController.swift @@ -134,7 +134,15 @@ class FeedDetailViewController: FeedDetailObjCViewController { @objc var suppressMarkAsRead = false + var scrollingDate = Date.distantPast + func deferredReload(story: Story? = nil) { + if let story { + print("🪿 queuing deferred reload for \(story)") + } else { + print("🪿 queuing deferred reload") + } + reloadWorkItem?.cancel() if let story { @@ -149,6 +157,16 @@ class FeedDetailViewController: FeedDetailObjCViewController { } if pendingStories.isEmpty { + print("🪿 starting deferred reload") + + let secondsSinceScroll = -scrollingDate.timeIntervalSinceNow + + if secondsSinceScroll < 0.5 { + print("🪿 too soon to reload; \(secondsSinceScroll) seconds since scroll") + deferredReload(story: story) + return + } + configureDataSource() } else { for story in pendingStories.values { @@ -228,12 +246,18 @@ extension FeedDetailViewController: FeedDetailInteraction { let cacheCount = storyCache.before.count + storyCache.after.count if cacheCount > 0, story.index >= cacheCount - 5 { + let debug = Date() + if storiesCollection.isRiverView, storiesCollection.activeFolder != nil { fetchRiverPage(storiesCollection.feedPage + 1, withCallback: nil) } else { fetchFeedDetail(storiesCollection.feedPage + 1, withCallback: nil) } + + print("📠Fetching next page took \(-debug.timeIntervalSinceNow) seconds") } + + scrollingDate = Date() } func tapped(story: Story) { diff --git a/clients/ios/Classes/Story.swift b/clients/ios/Classes/Story.swift index a4908f93e8..fccd1a381b 100644 --- a/clients/ios/Classes/Story.swift +++ b/clients/ios/Classes/Story.swift @@ -179,6 +179,7 @@ class StoryCache: ObservableObject { } func reload() { + let debug = Date() let storyCount = Int(appDelegate.storiesCollection.storyLocationsCount) var beforeSelection = [Int]() var selectedIndex = -999 @@ -202,7 +203,7 @@ class StoryCache: ObservableObject { selected = selectedIndex >= 0 ? Story(index: selectedIndex) : nil after = afterSelection.map { Story(index: $0) } - print("🪿 Reload: \(before.count) before, \(selected == nil ? "none" : selected!.debugTitle) selected, \(after.count) after") + print("🪿 Reload: \(before.count) before, \(selected == nil ? "none" : selected!.debugTitle) selected, \(after.count) after, took \(-debug.timeIntervalSinceNow) seconds") // From 1aafa10d614deca9417966fb98c8b69321b8f8b4 Mon Sep 17 00:00:00 2001 From: David Sinclair Date: Fri, 9 Feb 2024 21:02:08 -0500 Subject: [PATCH 19/69] #1247 (Mac Catalyst edition) - Fixed Toggle Sidebar button / menu command not working in some situations. - Fixed drawing of segmented controls. - Checked iPhone and iPad apps; fixed some regressions. 
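The sidebar fix below boils down to flipping the split view controller's preferredDisplayMode between the two-column and one-column arrangements inside an animation block. A Swift rendering of the same state flip, shown only to make the toggle explicit (a sketch, not the shipped code):

    import UIKit

    // Toggle the primary sidebar of a three-column UISplitViewController.
    func toggleSidebar(of splitViewController: UISplitViewController) {
        UIView.animate(withDuration: 0.2) {
            if splitViewController.preferredDisplayMode != .twoBesideSecondary {
                splitViewController.preferredDisplayMode = .twoBesideSecondary
            } else {
                splitViewController.preferredDisplayMode = .oneBesideSecondary
            }
        }
    }

The segmented-control fix is simpler: on Catalyst the custom backgroundColor fights the native bezel drawing, so those assignments are compiled out with !TARGET_OS_MACCATALYST guards.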
--- clients/ios/Classes/BaseViewController.m | 15 +++++++++++++++ clients/ios/Classes/FontSettingsViewController.m | 12 ++++++++++++ clients/ios/Classes/MenuViewController.m | 4 ++++ clients/ios/Classes/Story.swift | 2 +- .../ios/Classes/StoryPagesViewController.swift | 2 ++ clients/ios/Classes/ThemeManager.m | 4 ++++ 6 files changed, 38 insertions(+), 1 deletion(-) diff --git a/clients/ios/Classes/BaseViewController.m b/clients/ios/Classes/BaseViewController.m index 9438a997fb..9953659811 100644 --- a/clients/ios/Classes/BaseViewController.m +++ b/clients/ios/Classes/BaseViewController.m @@ -273,6 +273,13 @@ - (void)validateCommand:(UICommand *)command { } else if (command.action == @selector(chooseLayout:)) { NSString *value = self.appDelegate.storiesCollection.activeStoryTitlesPosition; command.state = [command.propertyList isEqualToString:value]; + } else if (command.action == @selector(toggleSidebar:)) { + UISplitViewController *splitViewController = self.appDelegate.splitViewController; + if (splitViewController.preferredDisplayMode != UISplitViewControllerDisplayModeTwoBesideSecondary) { + command.title = @"Show Sidebar"; + } else { + command.title = @"Hide Sidebar"; + } } else if (command.action == @selector(chooseTitle:)) { NSString *value = [[NSUserDefaults standardUserDefaults] objectForKey:@"story_list_preview_text_size"]; command.state = [command.propertyList isEqualToString:value]; @@ -456,6 +463,14 @@ - (IBAction)chooseTheme:(id)sender { [ThemeManager themeManager].theme = string; } +- (IBAction)toggleSidebar:(id)sender{ + UISplitViewController *splitViewController = self.appDelegate.splitViewController; + + [UIView animateWithDuration:0.2 animations:^{ + splitViewController.preferredDisplayMode = (splitViewController.preferredDisplayMode != UISplitViewControllerDisplayModeTwoBesideSecondary ? 
UISplitViewControllerDisplayModeTwoBesideSecondary : UISplitViewControllerDisplayModeOneBesideSecondary); + }]; +} + #pragma mark - #pragma mark Site menu diff --git a/clients/ios/Classes/FontSettingsViewController.m b/clients/ios/Classes/FontSettingsViewController.m index 21cb8e22b8..0c9663f6f0 100644 --- a/clients/ios/Classes/FontSettingsViewController.m +++ b/clients/ios/Classes/FontSettingsViewController.m @@ -439,7 +439,9 @@ - (UITableViewCell *)makeFontSizeTableCell { [self.fontSizeSegment setTitle:@"M" forSegmentAtIndex:2]; [self.fontSizeSegment setTitle:@"L" forSegmentAtIndex:3]; [self.fontSizeSegment setTitle:@"XL" forSegmentAtIndex:4]; +#if !TARGET_OS_MACCATALYST self.fontSizeSegment.backgroundColor = UIColorFromRGB(0xeeeeee); +#endif [self.fontSizeSegment setTitleTextAttributes:@{NSFontAttributeName:[UIFont fontWithName:@"WhitneySSm-Medium" size:12.0f]} forState:UIControlStateNormal]; [self.fontSizeSegment setContentOffset:CGSizeMake(0, 1) forSegmentAtIndex:0]; [self.fontSizeSegment setContentOffset:CGSizeMake(0, 1) forSegmentAtIndex:1]; @@ -467,7 +469,9 @@ - (UITableViewCell *)makeLineSpacingTableCell { [self.lineSpacingSegment setImage:[UIImage imageNamed:@"line_spacing_m"] forSegmentAtIndex:2]; [self.lineSpacingSegment setImage:[UIImage imageNamed:@"line_spacing_l"] forSegmentAtIndex:3]; [self.lineSpacingSegment setImage:[UIImage imageNamed:@"line_spacing_xl"] forSegmentAtIndex:4]; +#if !TARGET_OS_MACCATALYST self.lineSpacingSegment.backgroundColor = UIColorFromRGB(0xeeeeee); +#endif [[ThemeManager themeManager] updateSegmentedControl:self.lineSpacingSegment]; @@ -486,7 +490,9 @@ - (UITableViewCell *)makeFullScreenTableCell { self.fullscreenSegment.frame = CGRectMake(8, 7, cell.frame.size.width - 8*2, kMenuOptionHeight - 7*2); [self.fullscreenSegment setTitle:@"Full Screen" forSegmentAtIndex:0]; [self.fullscreenSegment setTitle:@"Toolbar" forSegmentAtIndex:1]; +#if !TARGET_OS_MACCATALYST self.fullscreenSegment.backgroundColor = UIColorFromRGB(0xeeeeee); +#endif [self.fullscreenSegment setTitleTextAttributes:@{NSFontAttributeName:[UIFont fontWithName:@"WhitneySSm-Medium" size:12.0f]} forState:UIControlStateNormal]; [self.fullscreenSegment setContentOffset:CGSizeMake(0, 1) forSegmentAtIndex:0]; [self.fullscreenSegment setContentOffset:CGSizeMake(0, 1) forSegmentAtIndex:1]; @@ -508,7 +514,9 @@ - (UITableViewCell *)makeAutoscrollTableCell { self.autoscrollSegment.frame = CGRectMake(8, 7, cell.frame.size.width - 8*2, kMenuOptionHeight - 7*2); [self.autoscrollSegment setTitle:@"Manual scroll" forSegmentAtIndex:0]; [self.autoscrollSegment setTitle:@"Auto scroll" forSegmentAtIndex:1]; +#if !TARGET_OS_MACCATALYST self.autoscrollSegment.backgroundColor = UIColorFromRGB(0xeeeeee); +#endif [self.autoscrollSegment setTitleTextAttributes:@{NSFontAttributeName:[UIFont fontWithName:@"WhitneySSm-Medium" size:12.0f]} forState:UIControlStateNormal]; [self.autoscrollSegment setContentOffset:CGSizeMake(0, 1) forSegmentAtIndex:0]; [self.autoscrollSegment setContentOffset:CGSizeMake(0, 1) forSegmentAtIndex:1]; @@ -530,7 +538,9 @@ - (UITableViewCell *)makeScrollOrientationTableCell { self.scrollOrientationSegment.frame = CGRectMake(8, 7, cell.frame.size.width - 8*2, kMenuOptionHeight - 7*2); [self.scrollOrientationSegment setTitle:@"⏩ Horizontal" forSegmentAtIndex:0]; [self.scrollOrientationSegment setTitle:@"⏬ Vertical" forSegmentAtIndex:1]; +#if !TARGET_OS_MACCATALYST self.scrollOrientationSegment.backgroundColor = UIColorFromRGB(0xeeeeee); +#endif [self.scrollOrientationSegment
setTitleTextAttributes:@{NSFontAttributeName:[UIFont fontWithName:@"WhitneySSm-Medium" size:12.0f]} forState:UIControlStateNormal]; [self.scrollOrientationSegment setContentOffset:CGSizeMake(0, 1) forSegmentAtIndex:0]; [self.scrollOrientationSegment setContentOffset:CGSizeMake(0, 1) forSegmentAtIndex:1]; @@ -566,7 +576,9 @@ - (UITableViewCell *)makeThemeTableCell { [self.themeSegment setDividerImage:blankImage forLeftSegmentState:UIControlStateNormal rightSegmentState:UIControlStateNormal barMetrics:UIBarMetricsDefault]; self.themeSegment.tintColor = [UIColor clearColor]; +#if !TARGET_OS_MACCATALYST self.themeSegment.backgroundColor = [UIColor clearColor]; +#endif [[ThemeManager themeManager] updateThemeSegmentedControl:self.themeSegment]; diff --git a/clients/ios/Classes/MenuViewController.m b/clients/ios/Classes/MenuViewController.m index 4d15ed887e..c794858b5e 100644 --- a/clients/ios/Classes/MenuViewController.m +++ b/clients/ios/Classes/MenuViewController.m @@ -204,7 +204,9 @@ - (UITableViewCell *)makeThemeSegmentedTableCell { [segmentedControl setDividerImage:blankImage forLeftSegmentState:UIControlStateNormal rightSegmentState:UIControlStateNormal barMetrics:UIBarMetricsDefault]; segmentedControl.tintColor = [UIColor clearColor]; +#if !TARGET_OS_MACCATALYST segmentedControl.backgroundColor = [UIColor clearColor]; +#endif segmentedControl.selectedSegmentIndex = valueIndex; @@ -250,7 +252,9 @@ - (UITableViewCell *)makeSegmentedTableCellForItem:(NSDictionary *)item forRow:( segmentedControl.apportionsSegmentWidthsByContent = YES; segmentedControl.selectedSegmentIndex = [item[MenuSegmentIndex] integerValue]; segmentedControl.tag = row; +#if !TARGET_OS_MACCATALYST segmentedControl.backgroundColor = UIColorFromRGB(0xeeeeee); +#endif [segmentedControl setTitleTextAttributes:@{NSFontAttributeName : [UIFont fontWithName:@"WhitneySSm-Medium" size:12.0]} forState:UIControlStateNormal]; [segmentedControl addTarget:self action:@selector(segmentedValueChanged:) forControlEvents:UIControlEventValueChanged]; diff --git a/clients/ios/Classes/Story.swift b/clients/ios/Classes/Story.swift index fccd1a381b..ec697353e7 100644 --- a/clients/ios/Classes/Story.swift +++ b/clients/ios/Classes/Story.swift @@ -336,7 +336,7 @@ class StorySettings { guard let pref = UserDefaults.standard.string(forKey: "grid_columns"), let columns = Int(pref) else { if NewsBlurAppDelegate.shared.isCompactWidth { return 1 - } else if NewsBlurAppDelegate.shared.isPortrait { + } else if NewsBlurAppDelegate.shared.isPortrait || NewsBlurAppDelegate.shared.isPhone { return 2 } else { return 4 diff --git a/clients/ios/Classes/StoryPagesViewController.swift b/clients/ios/Classes/StoryPagesViewController.swift index d59a6c76f8..f6196e51fb 100644 --- a/clients/ios/Classes/StoryPagesViewController.swift +++ b/clients/ios/Classes/StoryPagesViewController.swift @@ -23,6 +23,7 @@ class StoryPagesViewController: StoryPagesObjCViewController { WidgetCenter.shared.reloadAllTimelines() } +#if targetEnvironment(macCatalyst) @objc func validateToolbarItem(_ item: NSToolbarItem) -> Bool { if [.storyPagesSettings, .storyPagesBrowser].contains(item.itemIdentifier) { return self.isStoryShown @@ -30,4 +31,5 @@ class StoryPagesViewController: StoryPagesObjCViewController { return true } } +#endif } diff --git a/clients/ios/Classes/ThemeManager.m b/clients/ios/Classes/ThemeManager.m index fd97518d0d..668683d17e 100644 --- a/clients/ios/Classes/ThemeManager.m +++ b/clients/ios/Classes/ThemeManager.m @@ -266,7 +266,9 @@ - 
(void)updateTextAttributesForSegmentedControl:(UISegmentedControl *)segmentedC - (void)updateSegmentedControl:(UISegmentedControl *)segmentedControl { segmentedControl.tintColor = UIColorFromRGB(0x8F918B); +#if !TARGET_OS_MACCATALYST segmentedControl.backgroundColor = UIColorFromLightDarkRGB(0xe7e6e7, 0x303030); +#endif segmentedControl.selectedSegmentTintColor = UIColorFromLightDarkRGB(0xffffff, 0x6f6f75); [self updateTextAttributesForSegmentedControl:segmentedControl forState:UIControlStateNormal foregroundColor:UIColorFromLightDarkRGB(0x909090, 0xaaaaaa)]; @@ -275,7 +277,9 @@ - (void)updateThemeSegmentedControl:(UISegmentedControl *)segmentedControl { segmentedControl.tintColor = [UIColor clearColor]; +#if !TARGET_OS_MACCATALYST segmentedControl.backgroundColor = [UIColor clearColor]; +#endif segmentedControl.selectedSegmentTintColor = [UIColor clearColor]; } From 985ab53c71483aa27c6dc0ba62cdcc102bac9839 Mon Sep 17 00:00:00 2001 From: David Sinclair Date: Thu, 22 Feb 2024 22:08:38 -0500 Subject: [PATCH 20/69] #1247 (Mac Catalyst edition) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Added a View ▸ Show submenu for the intelligence mode. - Fixed the intelligence HUD not being horizontally centered on iPad and Mac. - Added Edit ▸ Find in Sites and Find in Feed commands to select the search fields in those views. - Disabled keyboard scrolling of the story pages, since that interfered with changing stories in the feed detail. --- clients/ios/Classes/BaseViewController.h | 4 + clients/ios/Classes/BaseViewController.m | 31 ++++++- clients/ios/Classes/FeedsObjCViewController.m | 4 + .../Classes/StoryPagesObjCViewController.m | 4 + .../ios/Resources/MainInterface.storyboard | 86 +++++++++++++++---- 5 files changed, 110 insertions(+), 19 deletions(-) diff --git a/clients/ios/Classes/BaseViewController.h b/clients/ios/Classes/BaseViewController.h index f27fb810bc..13da3b6409 100644 --- a/clients/ios/Classes/BaseViewController.h +++ b/clients/ios/Classes/BaseViewController.h @@ -39,6 +39,7 @@ - (void)collectionView:(UICollectionView *)collectionView selectItemAtIndexPath:(NSIndexPath *)indexPath animated:(BOOL)animated scrollPosition:(UICollectionViewScrollPosition)scrollPosition; - (void)collectionView:(UICollectionView *)collectionView deselectItemAtIndexPath:(NSIndexPath *)indexPath animated:(BOOL)animated; +- (IBAction)newSite:(id)sender; - (IBAction)reloadFeeds:(id)sender; - (IBAction)showMuteSites:(id)sender; - (IBAction)showOrganizeSites:(id)sender; @@ -49,6 +50,9 @@ - (IBAction)showSupportForum:(id)sender; - (IBAction)showLogout:(id)sender; +- (IBAction)findInFeeds:(id)sender; +- (IBAction)findInFeedDetail:(id)sender; + - (IBAction)chooseColumns:(id)sender; - (IBAction)chooseLayout:(id)sender; - (IBAction)chooseTitle:(id)sender; diff --git a/clients/ios/Classes/BaseViewController.m b/clients/ios/Classes/BaseViewController.m index 9953659811..9bef957c01 100644 --- a/clients/ios/Classes/BaseViewController.m +++ b/clients/ios/Classes/BaseViewController.m @@ -236,7 +236,7 @@ - (BOOL)isStoryShown { } - (BOOL)canPerformAction:(SEL)action withSender:(id)sender { - if (action == @selector(chooseLayout:)) { + if (action == @selector(chooseLayout:) || action == @selector(findInFeedDetail:)) { return self.isFeedShown; } else if (action == @selector(chooseTitle:) || action == @selector(choosePreview:)) { return self.isFeedShown && !self.isGrid; @@ -273,6 +273,10 @@ -
(void)validateCommand:(UICommand *)command { } else if (command.action == @selector(chooseLayout:)) { NSString *value = self.appDelegate.storiesCollection.activeStoryTitlesPosition; command.state = [command.propertyList isEqualToString:value]; + } else if (command.action == @selector(chooseIntelligence:)) { + NSInteger intelligence = [[NSUserDefaults standardUserDefaults] integerForKey:@"selectedIntelligence"]; + NSString *value = [NSString stringWithFormat:@"%@", @(intelligence + 1)]; + command.state = [command.propertyList isEqualToString:value]; } else if (command.action == @selector(toggleSidebar:)) { UISplitViewController *splitViewController = self.appDelegate.splitViewController; if (splitViewController.preferredDisplayMode != UISplitViewControllerDisplayModeTwoBesideSecondary) { @@ -338,6 +342,10 @@ - (void)validateCommand:(UICommand *)command { #pragma mark - #pragma mark File menu +- (IBAction)newSite:(id)sender { + [appDelegate.feedsViewController tapAddSite:nil]; +} + - (IBAction)reloadFeeds:(id)sender { [appDelegate reloadFeedsView:NO]; } @@ -375,6 +383,19 @@ - (IBAction)showLogout:(id)sender { [self.appDelegate confirmLogout]; } +#pragma mark - +#pragma mark Edit menu + +- (IBAction)findInFeeds:(id)sender { + [self.appDelegate showColumn:UISplitViewControllerColumnPrimary debugInfo:@"findInFeeds"]; + [self.appDelegate.feedsViewController.searchBar becomeFirstResponder]; +} + +- (IBAction)findInFeedDetail:(id)sender { + [self.appDelegate showColumn:UISplitViewControllerColumnSupplementary debugInfo:@"findInFeedDetail"]; + [self.appDelegate.feedDetailViewController.searchBar becomeFirstResponder]; +} + #pragma mark - #pragma mark View menu @@ -401,6 +422,14 @@ - (IBAction)chooseLayout:(id)sender { [self.appDelegate.detailViewController updateLayoutWithReload:YES fetchFeeds:YES]; } +- (IBAction)chooseIntelligence:(id)sender { + UICommand *command = sender; + NSInteger index = [command.propertyList integerValue]; + + [self.appDelegate.feedsViewController.intelligenceControl setSelectedSegmentIndex:index]; + [self.appDelegate.feedsViewController selectIntelligence]; +} + - (IBAction)chooseTitle:(id)sender { UICommand *command = sender; NSString *string = command.propertyList; diff --git a/clients/ios/Classes/FeedsObjCViewController.m b/clients/ios/Classes/FeedsObjCViewController.m index 96b179d8b0..39eaf452ab 100644 --- a/clients/ios/Classes/FeedsObjCViewController.m +++ b/clients/ios/Classes/FeedsObjCViewController.m @@ -2431,6 +2431,10 @@ - (IBAction)selectIntelligence { hud.mode = MBProgressHUDModeText; hud.removeFromSuperViewOnHide = YES; + if (!self.appDelegate.isPhone) { + hud.xOffset = 50; + } + NSIndexPath *topRow; if ([[self.feedTitlesTable indexPathsForVisibleRows] count]) { topRow = [[self.feedTitlesTable indexPathsForVisibleRows] objectAtIndex:0]; diff --git a/clients/ios/Classes/StoryPagesObjCViewController.m b/clients/ios/Classes/StoryPagesObjCViewController.m index d6bb340234..25dae6627f 100644 --- a/clients/ios/Classes/StoryPagesObjCViewController.m +++ b/clients/ios/Classes/StoryPagesObjCViewController.m @@ -107,6 +107,10 @@ - (void)viewDidLoad { [self.scrollView setAlwaysBounceHorizontal:self.isHorizontal]; [self.scrollView setAlwaysBounceVertical:!self.isHorizontal]; + if (@available(macCatalyst 17.0, *)) { + self.scrollView.allowsKeyboardScrolling = NO; + } + if (!self.isPhone) { self.scrollView.contentInsetAdjustmentBehavior = UIScrollViewContentInsetAdjustmentNever; } diff --git a/clients/ios/Resources/MainInterface.storyboard 
b/clients/ios/Resources/MainInterface.storyboard index 07a1c2e1df..4361d0b908 100644 --- a/clients/ios/Resources/MainInterface.storyboard +++ b/clients/ios/Resources/MainInterface.storyboard @@ -248,22 +248,15 @@ - - - - - - - - - - - - - - + + + + + + + @@ -272,7 +265,7 @@ - + @@ -295,7 +288,7 @@ - + @@ -312,7 +305,7 @@ - + @@ -322,7 +315,7 @@ - + @@ -341,6 +334,35 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -396,6 +418,34 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + From 77e95a1c1f42cb5b77be7d8e503f3fc1bd14961d Mon Sep 17 00:00:00 2001 From: David Sinclair Date: Fri, 23 Feb 2024 13:52:37 -0500 Subject: [PATCH 21/69] #1247 (Mac Catalyst edition) - Added extra logic to prevent hiding the feeds list when changing between feeds in some situations. - Worked around UIKit bug where showing the feeds list would take two taps/clicks in some situations. --- clients/ios/Classes/BaseViewController.m | 28 ++++++++++++++++++++++- clients/ios/Classes/NewsBlurAppDelegate.m | 6 ++++- 2 files changed, 32 insertions(+), 2 deletions(-) diff --git a/clients/ios/Classes/BaseViewController.m b/clients/ios/Classes/BaseViewController.m index 9bef957c01..e6c5c7faf4 100644 --- a/clients/ios/Classes/BaseViewController.m +++ b/clients/ios/Classes/BaseViewController.m @@ -496,7 +496,33 @@ - (IBAction)toggleSidebar:(id)sender{ UISplitViewController *splitViewController = self.appDelegate.splitViewController; [UIView animateWithDuration:0.2 animations:^{ - splitViewController.preferredDisplayMode = (splitViewController.preferredDisplayMode != UISplitViewControllerDisplayModeTwoBesideSecondary ? UISplitViewControllerDisplayModeTwoBesideSecondary : UISplitViewControllerDisplayModeOneBesideSecondary); + NSLog(@"toggleSidebar: displayMode: %@; preferredDisplayMode: %@; UISplitViewControllerDisplayModeSecondaryOnly: %@; UISplitViewControllerDisplayModeTwoBesideSecondary: %@, UISplitViewControllerDisplayModeOneBesideSecondary: %@; ", @(splitViewController.displayMode), @(splitViewController.preferredDisplayMode), @(UISplitViewControllerDisplayModeSecondaryOnly), @(UISplitViewControllerDisplayModeTwoBesideSecondary), @(UISplitViewControllerDisplayModeOneBesideSecondary)); // log + + if (splitViewController.splitBehavior == UISplitViewControllerSplitBehaviorOverlay) { + splitViewController.preferredDisplayMode = (splitViewController.displayMode != UISplitViewControllerDisplayModeTwoOverSecondary ? UISplitViewControllerDisplayModeTwoOverSecondary : UISplitViewControllerDisplayModeOneOverSecondary); + } else if (splitViewController.splitBehavior == UISplitViewControllerSplitBehaviorDisplace) { + if (splitViewController.preferredDisplayMode == UISplitViewControllerDisplayModeTwoDisplaceSecondary && + splitViewController.displayMode == UISplitViewControllerDisplayModeSecondaryOnly) { + splitViewController.preferredDisplayMode = UISplitViewControllerDisplayModeOneBesideSecondary; + + dispatch_async(dispatch_get_main_queue(), ^(void) { + splitViewController.preferredDisplayMode = UISplitViewControllerDisplayModeTwoDisplaceSecondary; + }); + } else { + splitViewController.preferredDisplayMode = (splitViewController.displayMode != UISplitViewControllerDisplayModeTwoDisplaceSecondary ? 
UISplitViewControllerDisplayModeTwoDisplaceSecondary : UISplitViewControllerDisplayModeOneBesideSecondary); + } + } else { + if (splitViewController.preferredDisplayMode == UISplitViewControllerDisplayModeTwoBesideSecondary && + splitViewController.displayMode == UISplitViewControllerDisplayModeSecondaryOnly) { + splitViewController.preferredDisplayMode = UISplitViewControllerDisplayModeOneBesideSecondary; + + dispatch_async(dispatch_get_main_queue(), ^(void) { + splitViewController.preferredDisplayMode = UISplitViewControllerDisplayModeTwoBesideSecondary; + }); + } else { + splitViewController.preferredDisplayMode = (splitViewController.displayMode != UISplitViewControllerDisplayModeTwoBesideSecondary ? UISplitViewControllerDisplayModeTwoBesideSecondary : UISplitViewControllerDisplayModeOneBesideSecondary); + } + } }]; } diff --git a/clients/ios/Classes/NewsBlurAppDelegate.m b/clients/ios/Classes/NewsBlurAppDelegate.m index daac112938..41a6df37b7 100644 --- a/clients/ios/Classes/NewsBlurAppDelegate.m +++ b/clients/ios/Classes/NewsBlurAppDelegate.m @@ -825,7 +825,11 @@ - (void)popToRootWithCompletion:(void (^)(void))completion { - (void)showColumn:(UISplitViewControllerColumn)column debugInfo:(NSString *)debugInfo { NSLog(@"⚠️ show column for %@: split view controller: %@ split nav: %@; split controllers: %@; detail controller: %@; detail nav: %@; detail nav controllers: %@", debugInfo, self.splitViewController, self.splitViewController.navigationController, self.splitViewController.viewControllers, self.detailViewController, self.detailViewController.navigationController, self.detailViewController.navigationController.viewControllers); // log - [self.splitViewController showColumn:column]; + if (self.splitViewController.displayMode != UISplitViewControllerDisplayModeSecondaryOnly && (self.splitViewController.preferredDisplayMode != UISplitViewControllerDisplayModeTwoBesideSecondary && + self.splitViewController.preferredDisplayMode != UISplitViewControllerDisplayModeTwoDisplaceSecondary && + self.splitViewController.preferredDisplayMode != UISplitViewControllerDisplayModeTwoOverSecondary)) { + [self.splitViewController showColumn:column]; + } NSLog(@"...shown"); // log } From efc9f6bd46e7305ded2e05103f0048090ddb5cd4 Mon Sep 17 00:00:00 2001 From: David Sinclair Date: Fri, 23 Feb 2024 19:41:48 -0500 Subject: [PATCH 22/69] #1247 (Mac Catalyst edition) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Implemented a modern UIContextMenuInteraction for Feed Detail on Mac, since it handles right-click in a Mac way. - Disabled the Feed Detail long press menu on Mac, since it's redundant. - Fixed some items in the contextual menu in Grid view.
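The hook that drives this is the standard UITableViewDelegate context-menu API, which Catalyst presents as a native right-click menu. A generic sketch of the shape (the controller and the single action here are placeholders, not the NewsBlur menu):

    import UIKit

    class ExampleTableViewController: UITableViewController {
        override func tableView(_ tableView: UITableView,
                                contextMenuConfigurationForRowAt indexPath: IndexPath,
                                point: CGPoint) -> UIContextMenuConfiguration? {
            UIContextMenuConfiguration(identifier: nil, previewProvider: nil) { _ in
                let copy = UIAction(title: "Copy", image: UIImage(systemName: "doc.on.doc")) { _ in
                    // Act on the row at indexPath here.
                }
                return UIMenu(title: "", children: [copy])
            }
        }
    }

The real implementation in the diff goes further: it resolves the tapped story first, then assembles read/saved/share/train actions and groups the secondary ones into an inline submenu.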
--- clients/ios/Classes/FeedDetailCardView.swift | 8 ++- .../Classes/FeedDetailObjCViewController.m | 10 ++-- .../Classes/FeedDetailViewController.swift | 49 +++++++++++++++++++ 3 files changed, 59 insertions(+), 8 deletions(-) diff --git a/clients/ios/Classes/FeedDetailCardView.swift b/clients/ios/Classes/FeedDetailCardView.swift index edad9fc990..d9beb7d8c8 100644 --- a/clients/ios/Classes/FeedDetailCardView.swift +++ b/clients/ios/Classes/FeedDetailCardView.swift @@ -75,6 +75,7 @@ struct CardView: View { } Button { + cache.appDelegate.activeStory = story.dictionary cache.appDelegate.feedDetailViewController.markFeedsRead(fromTimestamp: story.timestamp, andOlder: false) cache.appDelegate.feedDetailViewController.reload() } label: { @@ -82,12 +83,15 @@ struct CardView: View { } Button { + cache.appDelegate.activeStory = story.dictionary cache.appDelegate.feedDetailViewController.markFeedsRead(fromTimestamp: story.timestamp, andOlder: true) cache.appDelegate.feedDetailViewController.reload() } label: { Label("Mark older stories read", image: "mark-read") } + Divider() + Button { cache.appDelegate.storiesCollection.toggleStorySaved(story.dictionary) cache.appDelegate.feedDetailViewController.reload() @@ -96,13 +100,15 @@ struct CardView: View { } Button { + cache.appDelegate.activeStory = story.dictionary cache.appDelegate.showSend(to: cache.appDelegate.feedDetailViewController, sender: cache.appDelegate.feedDetailViewController.view) } label: { Label("Send this story to…", image: "email") } Button { - cache.appDelegate.openTrainStory(nil) + cache.appDelegate.activeStory = story.dictionary + cache.appDelegate.openTrainStory(cache.appDelegate.feedDetailViewController.view) } label: { Label("Train this story", image: "train") } diff --git a/clients/ios/Classes/FeedDetailObjCViewController.m b/clients/ios/Classes/FeedDetailObjCViewController.m index f96d0da9bd..23fc931af4 100644 --- a/clients/ios/Classes/FeedDetailObjCViewController.m +++ b/clients/ios/Classes/FeedDetailObjCViewController.m @@ -164,24 +164,20 @@ - (void)viewDidLoad { markReadLongPress.delegate = self; [view addGestureRecognizer:markReadLongPress]; + titleImageBarButton = [UIBarButtonItem alloc]; + #if TARGET_OS_MACCATALYST if (@available(macCatalyst 16.0, *)) { settingsBarButton.hidden = YES; feedMarkReadButton.hidden = YES; } -#endif - - titleImageBarButton = [UIBarButtonItem alloc]; - +#else UILongPressGestureRecognizer *tableLongPress = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(handleTableLongPress:)]; tableLongPress.minimumPressDuration = 1.0; tableLongPress.delegate = self; [self.storyTitlesTable addGestureRecognizer:tableLongPress]; -#if TARGET_OS_MACCATALYST - // CATALYST: support double-click; doing the following breaks clicking on rows in Catalyst. -#else UITapGestureRecognizer *doubleTapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:nil]; doubleTapGesture.numberOfTapsRequired = 2; diff --git a/clients/ios/Classes/FeedDetailViewController.swift b/clients/ios/Classes/FeedDetailViewController.swift index 1c135239dc..9e564e6cbc 100644 --- a/clients/ios/Classes/FeedDetailViewController.swift +++ b/clients/ios/Classes/FeedDetailViewController.swift @@ -216,6 +216,55 @@ extension FeedDetailViewController { reloadTable() } } + +#if targetEnvironment(macCatalyst) + override func tableView(_ tableView: UITableView, contextMenuConfigurationForRowAt indexPath: IndexPath, point: CGPoint) -> UIContextMenuConfiguration? 
{ + let location = storyLocation(for: indexPath) + + guard location < storiesCollection.storyLocationsCount else { + return nil + } + + let storyIndex = storiesCollection.index(fromLocation: location) + let story = Story(index: storyIndex) + + appDelegate.activeStory = story.dictionary + + return UIContextMenuConfiguration(identifier: nil, previewProvider: nil) { suggestedActions in + let read = UIAction(title: story.isRead ? "Mark as unread" : "Mark as read", image: UIImage(named: "mark-read")) { action in + self.appDelegate.storiesCollection.toggleStoryUnread(story.dictionary) + self.reload() + } + + let newer = UIAction(title: "Mark newer stories read", image: UIImage(named: "mark-read")) { action in + self.markFeedsRead(fromTimestamp: story.timestamp, andOlder: false) + self.reload() + } + + let older = UIAction(title: "Mark older stories read", image: UIImage(named: "mark-read")) { action in + self.markFeedsRead(fromTimestamp: story.timestamp, andOlder: true) + self.reload() + } + + let saved = UIAction(title: story.isSaved ? "Unsave this story" : "Save this story", image: UIImage(named: "saved-stories")) { action in + self.appDelegate.storiesCollection.toggleStorySaved(story.dictionary) + self.reload() + } + + let send = UIAction(title: "Send this story to…", image: UIImage(named: "email")) { action in + self.appDelegate.showSend(to: self, sender: self.view) + } + + let train = UIAction(title: "Train this story", image: UIImage(named: "train")) { action in + self.appDelegate.openTrainStory(self.view) + } + + let submenu = UIMenu(title: "", options: .displayInline, children: [saved, send, train]) + + return UIMenu(title: "", children: [read, newer, older, submenu]) + } + } +#endif } extension FeedDetailViewController: FeedDetailInteraction { From 7e3e7ce2440c8514a86d25cfc1e9181fe1c37bfe Mon Sep 17 00:00:00 2001 From: David Sinclair Date: Fri, 23 Feb 2024 20:48:44 -0500 Subject: [PATCH 23/69] #1247 (Mac Catalyst edition) - Now uses the blurred sidebar style for the Feeds list if the theme is similar to the system appearance (otherwise it looks horrible). 
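The "similar to the system appearance" test reduces to comparing the theme's darkness with the window's trait collection; the opt-out is just an opaque background that covers the blur. A condensed sketch of both pieces, where themeIsDark stands in for the ThemeManager properties added below:

    import UIKit

    // With primaryBackgroundStyle = .sidebar, a clear background lets the
    // Mac blur material show through; an opaque color hides it again when
    // the theme would clash with the system appearance.
    func updateFeedsBackground(of view: UIView, themeIsDark: Bool) {
        #if targetEnvironment(macCatalyst)
        let systemIsDark = view.traitCollection.userInterfaceStyle == .dark
        view.backgroundColor = (themeIsDark == systemIsDark) ? .clear : UIColor(white: 0.96, alpha: 1)
        #else
        view.backgroundColor = UIColor(white: 0.96, alpha: 1)
        #endif
    }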
--- .../ios/Classes/DetailViewController.swift | 2 +- clients/ios/Classes/FeedsObjCViewController.m | 33 +++++++------------ clients/ios/Classes/ThemeManager.h | 2 ++ clients/ios/Classes/ThemeManager.m | 12 +++++-- 4 files changed, 24 insertions(+), 25 deletions(-) diff --git a/clients/ios/Classes/DetailViewController.swift b/clients/ios/Classes/DetailViewController.swift index 5a1e714e41..52aabaee22 100644 --- a/clients/ios/Classes/DetailViewController.swift +++ b/clients/ios/Classes/DetailViewController.swift @@ -444,7 +444,7 @@ private extension DetailViewController { let isTop = layout == .top #if targetEnvironment(macCatalyst) -// splitViewController?.primaryBackgroundStyle = .sidebar //TODO: work in progress + splitViewController?.primaryBackgroundStyle = .sidebar #endif if layout != .grid || isPhone { diff --git a/clients/ios/Classes/FeedsObjCViewController.m b/clients/ios/Classes/FeedsObjCViewController.m index 39eaf452ab..b3c6f635c2 100644 --- a/clients/ios/Classes/FeedsObjCViewController.m +++ b/clients/ios/Classes/FeedsObjCViewController.m @@ -171,9 +171,13 @@ - (void)viewDidLoad { UIColorFromFixedRGB(0x4C4D4A)} forState:UIControlStateHighlighted]; #if TARGET_OS_MACCATALYST -// self.view.superview.backgroundColor = UIColor.clearColor; -// self.view.backgroundColor = UIColor.clearColor; - self.view.backgroundColor = UIColorFromRGB(0xf4f4f4); //TODO: work in progress + self.innerView.backgroundColor = UIColor.clearColor; + + if (ThemeManager.themeManager.isLikeSystem) { + self.view.backgroundColor = UIColor.clearColor; + } else { + self.view.backgroundColor = UIColorFromRGB(0xf4f4f4); + } #else self.view.backgroundColor = UIColorFromRGB(0xf4f4f4); #endif @@ -201,12 +205,7 @@ - (void)viewDidLoad { self.notifier.topOffsetConstraint = [NSLayoutConstraint constraintWithItem:self.notifier attribute:NSLayoutAttributeTop relatedBy:NSLayoutRelationEqual toItem:self.feedViewToolbar attribute:NSLayoutAttributeTop multiplier:1.0 constant:0]; [self.view addConstraint:self.notifier.topOffsetConstraint]; -#if TARGET_OS_MACCATALYST -// self.feedTitlesTable.backgroundColor = UIColor.clearColor; - self.feedTitlesTable.backgroundColor = UIColorFromRGB(0xf4f4f4); //TODO: work in progress -#else self.feedTitlesTable.backgroundColor = UIColorFromRGB(0xf4f4f4); -#endif self.feedTitlesTable.separatorColor = [UIColor clearColor]; self.feedTitlesTable.translatesAutoresizingMaskIntoConstraints = NO; self.feedTitlesTable.estimatedRowHeight = 0; @@ -1311,11 +1310,6 @@ - (void)resizeFontSize { - (void)updateTheme { [super updateTheme]; - // CATALYST: This prematurely dismisses the login view controller; is it really appropriate? 
-// if (![self.presentedViewController isKindOfClass:[UINavigationController class]] || (((UINavigationController *)self.presentedViewController).topViewController != (UIViewController *)self.appDelegate.fontSettingsViewController && ![((UINavigationController *)self.presentedViewController).topViewController conformsToProtocol:@protocol(IASKViewController)])) { -// [self.presentedViewController dismissViewControllerAnimated:YES completion:nil]; -// } - [self.appDelegate hidePopoverAnimated:YES]; UINavigationBarAppearance *appearance = [[UINavigationBarAppearance alloc] initWithIdiom:[[UIDevice currentDevice] userInterfaceIdiom]]; @@ -1333,9 +1327,11 @@ - (void)updateTheme { self.addBarButton.tintColor = UIColorFromRGB(0x8F918B); self.settingsBarButton.tintColor = UIColorFromRGB(0x8F918B); #if TARGET_OS_MACCATALYST -// self.view.superview.backgroundColor = UIColor.clearColor; -// self.view.backgroundColor = UIColor.clearColor; - self.view.backgroundColor = UIColorFromRGB(0xf4f4f4); //TODO: work in progress + if (ThemeManager.themeManager.isLikeSystem) { + self.view.backgroundColor = UIColor.clearColor; + } else { + self.view.backgroundColor = UIColorFromRGB(0xf4f4f4); + } #else self.refreshControl.tintColor = UIColorFromLightDarkRGB(0x0, 0xffffff); self.refreshControl.backgroundColor = UIColorFromRGB(0xE3E6E0); @@ -1365,12 +1361,7 @@ - (void)updateTheme { self.searchBar.keyboardAppearance = UIKeyboardAppearanceDefault; } -#if TARGET_OS_MACCATALYST -// self.feedTitlesTable.backgroundColor = UIColor.clearColor; - self.feedTitlesTable.backgroundColor = UIColorFromRGB(0xf4f4f4); //TODO: work in progress -#else self.feedTitlesTable.backgroundColor = UIColorFromRGB(0xf4f4f4); -#endif [self reloadFeedTitlesTable]; diff --git a/clients/ios/Classes/ThemeManager.h b/clients/ios/Classes/ThemeManager.h index 907585bc5a..801b64dd98 100644 --- a/clients/ios/Classes/ThemeManager.h +++ b/clients/ios/Classes/ThemeManager.h @@ -31,6 +31,8 @@ extern NSString * const ThemeStyleDark; @property (nonatomic, readonly) NSString *themeDisplayName; @property (nonatomic, readonly) NSString *themeCSSSuffix; @property (nonatomic, readonly) BOOL isDarkTheme; +@property (nonatomic, readonly) BOOL isSystemDark; +@property (nonatomic, readonly) BOOL isLikeSystem; + (instancetype)themeManager; diff --git a/clients/ios/Classes/ThemeManager.m b/clients/ios/Classes/ThemeManager.m index 668683d17e..a890f863c4 100644 --- a/clients/ios/Classes/ThemeManager.m +++ b/clients/ios/Classes/ThemeManager.m @@ -136,6 +136,14 @@ - (BOOL)isDarkTheme { return [theme isEqualToString:ThemeStyleDark] || [theme isEqualToString:ThemeStyleMedium]; } +- (BOOL)isSystemDark { + return self.appDelegate.window.windowScene.traitCollection.userInterfaceStyle == UIUserInterfaceStyleDark; +} + +- (BOOL)isLikeSystem { + return self.isDarkTheme == self.isSystemDark; +} + - (BOOL)isValidTheme:(NSString *)theme { return [theme isEqualToString:ThemeStyleLight] || [theme isEqualToString:ThemeStyleSepia] || [theme isEqualToString:ThemeStyleMedium] || [theme isEqualToString:ThemeStyleDark]; } @@ -444,9 +452,7 @@ - (void)handleThemeGesture:(UIPanGestureRecognizer *)recognizer { } - (void)updateForSystemAppearance { - BOOL isDark = self.appDelegate.window.windowScene.traitCollection.userInterfaceStyle == UIUserInterfaceStyleDark; - - [self systemAppearanceDidChange:isDark]; + [self systemAppearanceDidChange:self.isSystemDark]; } - (void)systemAppearanceDidChange:(BOOL)isDark { From 6f91f69e56aad5b4d4019a4e50795ecb1d82a088 Mon Sep 17 00:00:00 2001 From: David 
Sinclair Date: Fri, 23 Feb 2024 20:56:12 -0500 Subject: [PATCH 24/69] #1247 (Mac Catalyst edition) - Mac-specific story CSS. --- .../Classes/StoryDetailObjCViewController.m | 4 +- clients/ios/Classes/TrainerViewController.m | 4 ++ clients/ios/static/storyDetailView.css | 53 +++++++++++++++++++ 3 files changed, 59 insertions(+), 2 deletions(-) diff --git a/clients/ios/Classes/StoryDetailObjCViewController.m b/clients/ios/Classes/StoryDetailObjCViewController.m index 8490017ac8..02869509b2 100644 --- a/clients/ios/Classes/StoryDetailObjCViewController.m +++ b/clients/ios/Classes/StoryDetailObjCViewController.m @@ -486,7 +486,7 @@ - (void)drawStory:(BOOL)force withOrientation:(UIInterfaceOrientation)orientatio #if TARGET_OS_MACCATALYST // CATALYST: probably will want to add custom CSS for Macs. - contentWidthClass = @"NB-ipad-wide NB-ipad-pro-12-wide"; + contentWidthClass = @"NB-mac NB-ipad-pro-12-wide"; #else if (UIInterfaceOrientationIsLandscape(orientation) && !self.isPhoneOrCompact) { if (iPadPro12) { @@ -2421,7 +2421,7 @@ - (void)changeWebViewWidth { #if TARGET_OS_MACCATALYST // CATALYST: probably will want to add custom CSS for Macs. - contentWidthClass = @"NB-ipad-wide NB-ipad-pro-12-wide"; + contentWidthClass = @"NB-mac NB-ipad-pro-12-wide"; #else UIInterfaceOrientation orientation = self.view.window.windowScene.interfaceOrientation; diff --git a/clients/ios/Classes/TrainerViewController.m b/clients/ios/Classes/TrainerViewController.m index ba33845050..628f3ead77 100644 --- a/clients/ios/Classes/TrainerViewController.m +++ b/clients/ios/Classes/TrainerViewController.m @@ -159,6 +159,9 @@ - (NSString *)makeTrainerHTML { int contentWidth = self.view.frame.size.width; NSString *contentWidthClass; +#if TARGET_OS_MACCATALYST + contentWidthClass = @"NB-mac"; +#else if (contentWidth > 700) { contentWidthClass = @"NB-ipad-wide"; } else if (contentWidth > 480) { @@ -166,6 +169,7 @@ - (NSString *)makeTrainerHTML { } else { contentWidthClass = @"NB-iphone"; } +#endif // set up layout values based on iPad/iPhone NSString *headerString = [NSString stringWithFormat:@ diff --git a/clients/ios/static/storyDetailView.css b/clients/ios/static/storyDetailView.css index 9c04227a53..ff3c965e1e 100644 --- a/clients/ios/static/storyDetailView.css +++ b/clients/ios/static/storyDetailView.css @@ -87,6 +87,54 @@ line-height: 2.0em; } +/** + * Mac Style + */ + +.NB-mac .NB-header { + padding: 1em 30px; +} + +.NB-mac .NB-header .NB-header-inner { + margin: 0px 0px; +} + +.NB-mac .NB-story { + padding: 20px 30px; +} + +.NB-mac .NB-share-inner-wrapper { + margin: 0 30px; +} + +.NB-mac#story_pane .NB-story-comments-public-teaser, +.NB-mac#story_pane .NB-story-comments-public-header, +.NB-mac#story_pane .NB-story-comments-friends-header, +.NB-mac#story_pane .NB-story-comments-shares-teaser { + padding-left: 30px; + padding-right: 30px; +} + +.NB-mac#story_pane .NB-story-comment { + padding: 0 30px 2px 110px; +} + +.NB-mac#story_pane .NB-story-comment .NB-user-avatar, +.NB-mac#story_pane .NB-story-comment .NB-user-avatar.NB-story-comment-reshare { + left: 26px; +} +.NB-mac#story_pane .NB-story-comment .NB-story-comment-reshares .NB-user-avatar { + left: 45px; +} + +.NB-mac#story_pane .NB-story-comment .NB-button-wrapper { + margin-top: -5px; +} + +.NB-mac .NB-button.NB-share-button a { + font-size: 11px; +} + /** * iPad Wide Style */ @@ -590,6 +638,11 @@ div + p { width: calc(100% + 24px) !important; } +.NB-mac .NB-story .NB-large-image { + margin-left: -30px !important; + width: calc(100% + 60px) !important; +} + 
.NB-ipad-narrow .NB-story .NB-large-image { margin-left: -30px !important; width: calc(100% + 60px) !important; From 87d3cabfad13c986e4e215532915be35facecbec Mon Sep 17 00:00:00 2001 From: David Sinclair Date: Tue, 26 Mar 2024 19:43:03 -0500 Subject: [PATCH 25/69] #1247 (Mac Catalyst edition) - Worked around Catalyst bug where the feeds list is unexpectedly inset. --- clients/ios/Classes/FeedsObjCViewController.h | 2 ++ clients/ios/Classes/FeedsObjCViewController.m | 14 ++++++++++++++ clients/ios/Resources/MainInterface.storyboard | 6 ++++-- 3 files changed, 20 insertions(+), 2 deletions(-) diff --git a/clients/ios/Classes/FeedsObjCViewController.h b/clients/ios/Classes/FeedsObjCViewController.h index 195f7da5f6..af44d05840 100644 --- a/clients/ios/Classes/FeedsObjCViewController.h +++ b/clients/ios/Classes/FeedsObjCViewController.h @@ -52,6 +52,8 @@ UIGestureRecognizerDelegate, UISearchBarDelegate> { @property (nonatomic) IBOutlet UIView *innerView; @property (nonatomic) IBOutlet UITableView *feedTitlesTable; @property (nonatomic) IBOutlet NSLayoutConstraint *feedTitlesTopConstraint; +@property (nonatomic) IBOutlet NSLayoutConstraint *feedTitlesLeadingConstraint; +@property (nonatomic) IBOutlet NSLayoutConstraint *feedTitlesTrailingConstraint; @property (nonatomic) IBOutlet UIToolbar *feedViewToolbar; @property (nonatomic) IBOutlet UISlider * feedScoreSlider; @property (nonatomic) IBOutlet UIBarButtonItem * homeButton; diff --git a/clients/ios/Classes/FeedsObjCViewController.m b/clients/ios/Classes/FeedsObjCViewController.m index b3c6f635c2..e27b4a9c52 100644 --- a/clients/ios/Classes/FeedsObjCViewController.m +++ b/clients/ios/Classes/FeedsObjCViewController.m @@ -130,7 +130,15 @@ - (void)viewDidLoad { self.searchBar.nb_searchField.textColor = UIColorFromRGB(0x0); [self.searchBar setSearchBarStyle:UISearchBarStyleMinimal]; [self.searchBar setAutocapitalizationType:UITextAutocapitalizationTypeNone]; +#if TARGET_OS_MACCATALYST + // Workaround for Catalyst bug. + self.searchBar.frame = CGRectMake(10, 0, CGRectGetWidth(self.feedTitlesTable.frame) - 100, 44.); + UIView *searchContainerView = [[UIView alloc] initWithFrame:CGRectMake(0, 0, CGRectGetWidth(self.feedTitlesTable.frame), 44.)]; + [searchContainerView addSubview:self.searchBar]; + self.feedTitlesTable.tableHeaderView = searchContainerView; +#else self.feedTitlesTable.tableHeaderView = self.searchBar; +#endif userLabelFont = [UIFont fontWithName:@"WhitneySSm-Medium" size:15.0]; @@ -210,6 +218,12 @@ - (void)viewDidLoad { self.feedTitlesTable.translatesAutoresizingMaskIntoConstraints = NO; self.feedTitlesTable.estimatedRowHeight = 0; +#if TARGET_OS_MACCATALYST + // Workaround for Catalyst bug. + self.feedTitlesLeadingConstraint.constant = -10; + self.feedTitlesTrailingConstraint.constant = -10; +#endif + if (@available(iOS 15.0, *)) { self.feedTitlesTable.sectionHeaderTopPadding = 0; } diff --git a/clients/ios/Resources/MainInterface.storyboard b/clients/ios/Resources/MainInterface.storyboard index 4361d0b908..ab12d28cb0 100644 --- a/clients/ios/Resources/MainInterface.storyboard +++ b/clients/ios/Resources/MainInterface.storyboard @@ -1,9 +1,9 @@ - + - + @@ -227,8 +227,10 @@ + + From d41280b2e15bdef0a1a89c5304204f20a5b34f7a Mon Sep 17 00:00:00 2001 From: David Sinclair Date: Tue, 26 Mar 2024 22:02:32 -0500 Subject: [PATCH 26/69] #1247 (Mac Catalyst edition) - Fixed overlapping stories when swiping with two fingers in feed detail. - This issue was on iPad too (and may not have been Catalyst-specific). 
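The underlying cause is that the page views share one scroll view, and a layout pass that runs mid-swipe can leave a neighboring page stacked above the visible one. Re-asserting the current page's z-order during layout makes the stacking deterministic; a sketch of the idea with illustrative names:

    import UIKit

    class PagedStoriesViewController: UIViewController {
        let scrollView = UIScrollView()
        var currentPageView: UIView?

        override func viewDidLayoutSubviews() {
            // Keep the visible page on top if layout reordered the siblings.
            if let currentPageView, scrollView.subviews.last != currentPageView {
                scrollView.bringSubviewToFront(currentPageView)
            }

            super.viewDidLayoutSubviews()
        }
    }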
--- clients/ios/Classes/StoryPagesObjCViewController.m | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/clients/ios/Classes/StoryPagesObjCViewController.m b/clients/ios/Classes/StoryPagesObjCViewController.m index 25dae6627f..cbd71cd84e 100644 --- a/clients/ios/Classes/StoryPagesObjCViewController.m +++ b/clients/ios/Classes/StoryPagesObjCViewController.m @@ -381,6 +381,10 @@ - (void)viewDidLayoutSubviews { self.scrollView.frame = CGRectMake(frame.origin.x, frame.origin.y, floor(frame.size.width), floor(frame.size.height)); } + if (self.scrollView.subviews.lastObject != self.currentPage.view) { + [self.scrollView bringSubviewToFront:self.currentPage.view]; + } + [super viewDidLayoutSubviews]; } From 51684a78120851f5e27415a50753632aeaa47ee5 Mon Sep 17 00:00:00 2001 From: David Sinclair Date: Thu, 4 Apr 2024 20:06:11 -0500 Subject: [PATCH 27/69] #1247 (Mac Catalyst edition) - Rewrote the trainer view using SwiftUI, to be native instead of a web view. - Added a Feed class and other data enhancements to support this. --- clients/ios/Classes/Feed.swift | 213 ++++++++++++ clients/ios/Classes/FeedDetailCardView.swift | 16 +- clients/ios/Classes/NewsBlurAppDelegate.m | 21 +- clients/ios/Classes/Story.swift | 307 +++--------------- clients/ios/Classes/StoryCache.swift | 114 +++++++ clients/ios/Classes/StorySettings.swift | 150 +++++++++ clients/ios/Classes/SwiftUIUtilities.swift | 77 +++++ clients/ios/Classes/TrainerCapsule.swift | 67 ++++ clients/ios/Classes/TrainerView.swift | 214 ++++++++++++ clients/ios/Classes/TrainerViewController.h | 6 +- clients/ios/Classes/TrainerViewController.m | 12 +- .../ios/Classes/TrainerViewController.swift | 58 ++++ clients/ios/Classes/TrainerWord.swift | 28 ++ .../ios/NewsBlur.xcodeproj/project.pbxproj | 42 +++ .../TrainerViewController.xib | 28 +- 15 files changed, 1039 insertions(+), 314 deletions(-) create mode 100644 clients/ios/Classes/Feed.swift create mode 100644 clients/ios/Classes/StoryCache.swift create mode 100644 clients/ios/Classes/StorySettings.swift create mode 100644 clients/ios/Classes/TrainerCapsule.swift create mode 100644 clients/ios/Classes/TrainerView.swift create mode 100644 clients/ios/Classes/TrainerViewController.swift create mode 100644 clients/ios/Classes/TrainerWord.swift diff --git a/clients/ios/Classes/Feed.swift b/clients/ios/Classes/Feed.swift new file mode 100644 index 0000000000..57ef2fc5ec --- /dev/null +++ b/clients/ios/Classes/Feed.swift @@ -0,0 +1,213 @@ +// +// Feed.swift +// NewsBlur +// +// Created by David Sinclair on 2024-04-04. +// Copyright © 2024 NewsBlur. All rights reserved. +// + +import Foundation +import UIKit + +// The Feed, Story, and StoryCache classes could be quite useful going forward; rather than calling getStory() to get the dictionary, could have a variation that returns a Story instance. Could fetch from the cache if available, or make and cache one from the dictionary. Would need to remove it from the cache when changing anything about a story. Could perhaps make the cache part of StoriesCollection. + +/// A dictionary with the most broad key and value types, common in ObjC code. +typealias AnyDictionary = [AnyHashable : Any] + +/// A feed, wrapping the dictionary representation. +class Feed: Identifiable { + let id: String + var name = "" + var subscribers = 0 + + var dictionary = AnyDictionary() + + var isRiverOrSocial = false + + var colorBarLeft: UIColor? + var colorBarRight: UIColor? + + lazy var image: UIImage?
= { + guard let appDelegate = NewsBlurAppDelegate.shared else { + return nil + } + + if let image = appDelegate.getFavicon(id) { + return Utilities.roundCorneredImage(image, radius: 4, convertTo: CGSizeMake(16, 16)) + } else { + return nil + } + }() + + var classifiers: AnyDictionary? { + guard let appDelegate = NewsBlurAppDelegate.shared else { + return nil + } + + return appDelegate.storiesCollection.activeClassifiers[id] as? AnyDictionary + } + + func classifiers(for kind: String) -> AnyDictionary? { + return classifiers?[kind] as? AnyDictionary + } + + enum Score: Int { + case none = 0 + case like = 1 + case dislike = -1 + + var imageName: String { + switch self { + case .none: + return "hand.thumbsup" + case .like: + return "hand.thumbsup.fill" + case .dislike: + return "hand.thumbsdown.fill" + } + } + } + + struct Training: Identifiable { + let name: String + let count: Int + let score: Score + + var id: String { + return name + } + } + + lazy var titles: [Training] = { + guard let appDelegate = NewsBlurAppDelegate.shared, + let classifierTitles = self.classifiers(for: "titles") else { + return [] + } + + let userTitles = classifierTitles.map { Training(name: $0.key as! String, count: 0, score: Score(rawValue: $0.value as? Int ?? 0) ?? .none) } + + return userTitles.sorted() + }() + + lazy var authors: [Training] = { + guard let appDelegate = NewsBlurAppDelegate.shared, + let classifierAuthors = self.classifiers(for: "authors"), + let activeAuthors = appDelegate.storiesCollection.activePopularAuthors as? [[AnyHashable]] else { + return [] + } + + var userAuthors = [Training]() + + for (someName, someScore) in classifierAuthors { + if let name = someName as? String, let score = someScore as? Int, !activeAuthors.contains(where: { $0[0] == someName }) { + userAuthors.append(Training(name: name, count: 0, score: Score(rawValue: score) ?? .none)) + } + } + + let otherAuthors: [Training] = activeAuthors.map { Training(name: $0[0] as! String, count: $0[1] as! Int, score: Score(rawValue: classifierAuthors[$0[0] as! String] as? Int ?? 0) ?? .none) } + + return userAuthors.sorted() + otherAuthors + }() + + lazy var tags: [Training] = { + guard let appDelegate = NewsBlurAppDelegate.shared, + let classifierTags = self.classifiers(for: "tags"), + let activeTags = appDelegate.storiesCollection.activePopularTags as? [[AnyHashable]] else { + return [] + } + + var userTags = [Training]() + + for (someName, someScore) in classifierTags { + if let name = someName as? String, let score = someScore as? Int, !activeTags.contains(where: { $0[0] == someName }) { + userTags.append(Training(name: name, count: 0, score: Score(rawValue: score) ?? .none)) + } + } + + let otherTags: [Training] = activeTags.map { Training(name: $0[0] as! String, count: $0[1] as! Int, score: Score(rawValue: classifierTags[$0[0] as! String] as? Int ?? 0) ?? .none) } + + return userTags.sorted() + otherTags + }() + + init(id: String) { + self.id = id + + guard let appDelegate = NewsBlurAppDelegate.shared else { + return + } + + var feed: [String : Any]? = appDelegate.dictActiveFeeds[id] as? [String : Any] + + if feed == nil { + feed = appDelegate.dictFeeds[id] as? [String : Any] + } + + guard let feed else { + return + } + + dictionary = feed + + load() + } + + init(dictionary: AnyDictionary) { + id = "\(dictionary["id"] ?? 
"")" + + self.dictionary = dictionary + + load() + } + + private func load() { + guard let appDelegate = NewsBlurAppDelegate.shared, let storiesCollection = appDelegate.storiesCollection else { + return + } + + name = dictionary["feed_title"] as? String ?? "" + subscribers = dictionary["num_subscribers"] as? Int ?? 0 + + colorBarLeft = color(for: "favicon_fade", from: dictionary, default: "707070") + colorBarRight = color(for: "favicon_color", from: dictionary, default: "505050") + + isRiverOrSocial = storiesCollection.isRiverOrSocial + } + + func color(for key: String, from feed: AnyDictionary, default defaultHex: String) -> UIColor { + let hex = feed[key] as? String ?? defaultHex + let scanner = Scanner(string: hex) + var color: Int64 = 0 + scanner.scanHexInt64(&color) + let value = Int(color) + + return ThemeManager.shared.fixedColor(fromRGB: value) ?? UIColor.gray + } +} + +extension Feed: Equatable { + static func == (lhs: Feed, rhs: Feed) -> Bool { + return lhs.id == rhs.id + } +} + +extension Feed: CustomDebugStringConvertible { + var debugDescription: String { + return "Feed \"\(name)\" (\(id))" + } +} + +extension Feed.Training: Hashable { + static func == (lhs: Feed.Training, rhs: Feed.Training) -> Bool { + return lhs.name == rhs.name + } + + func hash(into hasher: inout Hasher) { + hasher.combine(name) + } +} + +extension Feed.Training: Comparable { + static func < (lhs: Feed.Training, rhs: Feed.Training) -> Bool { + return lhs.name < rhs.name + } +} diff --git a/clients/ios/Classes/FeedDetailCardView.swift b/clients/ios/Classes/FeedDetailCardView.swift index d9beb7d8c8..1a1720060f 100644 --- a/clients/ios/Classes/FeedDetailCardView.swift +++ b/clients/ios/Classes/FeedDetailCardView.swift @@ -174,14 +174,14 @@ struct CardContentView: View { var body: some View { VStack(alignment: .leading) { - if story.isRiverOrSocial, let feedImage { + if let feed = story.feed, feed.isRiverOrSocial, let feedImage = feed.image { HStack { Image(uiImage: feedImage) .resizable() .frame(width: 16, height: 16) .padding(.leading, cache.settings.spacing == .compact ? 20 : 24) - Text(story.feedName) + Text(feed.name) .font(font(named: "WhitneySSm-Medium", size: 12)) .lineLimit(1) .foregroundColor(feedColor) @@ -245,14 +245,6 @@ struct CardContentView: View { } } - var feedImage: UIImage? { - if let image = cache.appDelegate.getFavicon(story.feedID) { - return Utilities.roundCorneredImage(image, radius: 4, convertTo: CGSizeMake(16, 16)) - } else { - return nil - } - } - var unreadImage: UIImage? 
{ guard story.isReadAvailable else { return nil @@ -310,7 +302,7 @@ struct CardFeedBarView: View { var body: some View { GeometryReader { geometry in - if let color = story.feedColorBarLeft { + if let feed = story.feed, let color = feed.colorBarLeft { Path { path in path.move(to: CGPoint(x: 0, y: 0)) path.addLine(to: CGPoint(x: 0, y: geometry.size.height)) @@ -318,7 +310,7 @@ struct CardFeedBarView: View { .stroke(Color(color), lineWidth: 4) } - if let color = story.feedColorBarRight { + if let feed = story.feed, let color = feed.colorBarRight { Path { path in path.move(to: CGPoint(x: 4, y: 0)) path.addLine(to: CGPoint(x: 4, y: geometry.size.height)) diff --git a/clients/ios/Classes/NewsBlurAppDelegate.m b/clients/ios/Classes/NewsBlurAppDelegate.m index 41a6df37b7..1d6ba4f4eb 100644 --- a/clients/ios/Classes/NewsBlurAppDelegate.m +++ b/clients/ios/Classes/NewsBlurAppDelegate.m @@ -1310,9 +1310,9 @@ - (void)openTrainSite { - (void)openTrainSiteWithFeedLoaded:(BOOL)feedLoaded from:(id)sender { UINavigationController *navController = self.feedsNavigationController; - trainerViewController.feedTrainer = YES; - trainerViewController.storyTrainer = NO; - trainerViewController.feedLoaded = feedLoaded; + trainerViewController.isStoryTrainer = NO; + trainerViewController.isFeedLoaded = feedLoaded; + [trainerViewController reload]; if (!self.isPhone) { // trainerViewController.modalPresentationStyle=UIModalPresentationFormSheet; @@ -1330,9 +1330,10 @@ - (void)openTrainSiteWithFeedLoaded:(BOOL)feedLoaded from:(id)sender { - (void)openTrainStory:(id)sender { UINavigationController *navController = self.feedsNavigationController; - trainerViewController.feedTrainer = NO; - trainerViewController.storyTrainer = YES; - trainerViewController.feedLoaded = YES; + trainerViewController.isStoryTrainer = YES; + trainerViewController.isFeedLoaded = YES; + [trainerViewController reload]; + if (!self.isPhone) { [self showPopoverWithViewController:self.trainerViewController contentSize:CGSizeMake(500, 630) sender:sender]; } else { @@ -3910,7 +3911,7 @@ - (void)toggleAuthorClassifier:(NSString *)author feedId:(NSString *)feedId { [feedClassifiers setObject:authors forKey:@"authors"]; [storiesCollection.activeClassifiers setObject:feedClassifiers forKey:feedId]; [self.storyPagesViewController refreshHeaders]; - [self.trainerViewController refresh]; + [self.trainerViewController reload]; NSString *urlString = [NSString stringWithFormat:@"%@/classifier/save", self.url]; @@ -3955,7 +3956,7 @@ - (void)toggleTagClassifier:(NSString *)tag feedId:(NSString *)feedId { [feedClassifiers setObject:tags forKey:@"tags"]; [storiesCollection.activeClassifiers setObject:feedClassifiers forKey:feedId]; [self.storyPagesViewController refreshHeaders]; - [self.trainerViewController refresh]; + [self.trainerViewController reload]; NSString *urlString = [NSString stringWithFormat:@"%@/classifier/save", self.url]; @@ -4004,7 +4005,7 @@ - (void)toggleTitleClassifier:(NSString *)title feedId:(NSString *)feedId score: [feedClassifiers setObject:titles forKey:@"titles"]; [storiesCollection.activeClassifiers setObject:feedClassifiers forKey:feedId]; [self.storyPagesViewController refreshHeaders]; - [self.trainerViewController refresh]; + [self.trainerViewController reload]; NSString *urlString = [NSString stringWithFormat:@"%@/classifier/save", self.url]; @@ -4047,7 +4048,7 @@ - (void)toggleFeedClassifier:(NSString *)feedId { [feedClassifiers setObject:feeds forKey:@"feeds"]; [storiesCollection.activeClassifiers setObject:feedClassifiers 
forKey:feedId]; [self.storyPagesViewController refreshHeaders]; - [self.trainerViewController refresh]; + [self.trainerViewController reload]; NSString *urlString = [NSString stringWithFormat:@"%@/classifier/save", self.url]; diff --git a/clients/ios/Classes/Story.swift b/clients/ios/Classes/Story.swift index ec697353e7..62b819d28e 100644 --- a/clients/ios/Classes/Story.swift +++ b/clients/ios/Classes/Story.swift @@ -8,17 +8,17 @@ import Foundation -// The Story and StoryCache classes could be quite useful going forward; Rather than calling getStory() to get the dictionary, could have a variation that returns a Story instance. Could fetch from the cache if available, or make and cache one from the dictionary. Would need to remove it from the cache when changing anything about a story. Could perhaps make the cache part of StoriesCollection. +// The Feed, Story, and StoryCache classes could be quite useful going forward; Rather than calling getStory() to get the dictionary, could have a variation that returns a Story instance. Could fetch from the cache if available, or make and cache one from the dictionary. Would need to remove it from the cache when changing anything about a story. Could perhaps make the cache part of StoriesCollection. /// A story, wrapping the dictionary representation. class Story: Identifiable { let id = UUID() let index: Int - var dictionary = [String : Any]() + var dictionary = AnyDictionary() + + var feed: Feed? - var feedID = "" - var feedName = "" var title = "" var content = "" var dateString = "" @@ -35,9 +35,34 @@ class Story: Identifiable { return author.isEmpty ? dateString : "\(dateString) · \(author)" } - var isRiverOrSocial = true - var feedColorBarLeft: UIColor? - var feedColorBarRight: UIColor? + var titles: [Feed.Training] { + guard let classifiers = feed?.classifiers(for: "titles") else { + return [] + } + + let lowercasedTitle = title.lowercased() + let keys = classifiers.keys.compactMap { $0 as? String } + let words = keys.filter { lowercasedTitle.contains($0.lowercased()) } + let sorted = words.sorted() + + return sorted.map { Feed.Training(name: $0, count: 0, score: Feed.Score(rawValue: classifiers[$0] as? Int ?? 0) ?? .none) } + } + + var authors: [Feed.Training] { + guard let classifiers = feed?.classifiers(for: "authors") else { + return [] + } + + return [Feed.Training(name: author, count: 0, score: Feed.Score(rawValue: classifiers[author] as? Int ?? 0) ?? .none)] + } + + var tags: [Feed.Training] { + guard let tags = dictionary["story_tags"] as? [String], let classifiers = feed?.classifiers(for: "tags") else { + return [] + } + + return tags.map { Feed.Training(name: $0, count: 0, score: Feed.Score(rawValue: classifiers[$0] as? Int ?? 0) ?? .none) } + } var isSelected: Bool { return index == NewsBlurAppDelegate.shared!.storiesCollection.locationOfActiveStory() @@ -79,24 +104,13 @@ class Story: Identifiable { dictionary = story - if let id = dictionary["story_feed_id"] { - feedID = appDelegate.feedIdWithoutSearchQuery("\(id)") - } - - var feed: [String : Any]? - - if storiesCollection.isRiverOrSocial { - feed = appDelegate.dictActiveFeeds[feedID] as? [String : Any] - } - - if feed == nil { - feed = appDelegate.dictFeeds[feedID] as? [String : Any] - } - - if let feed { - feedName = feed["feed_title"] as? String ?? 
"" - feedColorBarLeft = color(for: "favicon_fade", from: feed, default: "707070") - feedColorBarRight = color(for: "favicon_color", from: feed, default: "505050") + if let dictID = dictionary["story_feed_id"], let id = appDelegate.feedIdWithoutSearchQuery("\(dictID)") { + if let cachedFeed = StoryCache.feeds[id] { + feed = cachedFeed + } else { + feed = Feed(id: id) + StoryCache.feeds[id] = feed + } } title = (string(for: "story_title") as NSString).decodingHTMLEntities() @@ -114,17 +128,6 @@ class Story: Identifiable { isRead = !storiesCollection .isStoryUnread(dictionary) isReadAvailable = storiesCollection.activeFolder != "saved_stories" - isRiverOrSocial = storiesCollection.isRiverOrSocial - } - - func color(for key: String, from feed: [String : Any], default defaultHex: String) -> UIColor { - let hex = feed[key] as? String ?? defaultHex - let scanner = Scanner(string: hex) - var color: Int64 = 0 - scanner.scanHexInt64(&color) - let value = Int(color) - - return ThemeManager.shared.fixedColor(fromRGB: value) ?? UIColor.gray } } @@ -136,236 +139,6 @@ extension Story: Equatable { extension Story: CustomDebugStringConvertible { var debugDescription: String { - return "Story #\(index) \"\(title)\" in \(feedName)" - } -} - -/// A cache of stories for the feed detail grid view. -class StoryCache: ObservableObject { - let appDelegate = NewsBlurAppDelegate.shared! - - let settings = StorySettings() - - var isDarkTheme: Bool { - return ThemeManager.shared.isDarkTheme - } - - var isGrid: Bool { - return appDelegate.detailViewController.layout == .grid - } - - var isPhone: Bool { - return appDelegate.detailViewController.isPhone - } - - var canPullToRefresh: Bool { - return appDelegate.feedDetailViewController.canPullToRefresh - } - - @Published var before = [Story]() - @Published var selected: Story? - @Published var after = [Story]() - - var all: [Story] { - if let selected { - return before + [selected] + after - } else { - return before + after - } - } - - func story(with index: Int) -> Story? { - return all.first(where: { $0.index == index } ) - } - - func reload() { - let debug = Date() - let storyCount = Int(appDelegate.storiesCollection.storyLocationsCount) - var beforeSelection = [Int]() - var selectedIndex = -999 - var afterSelection = [Int]() - - if storyCount > 0 { - selectedIndex = appDelegate.storiesCollection.locationOfActiveStory() - - if selectedIndex < 0 { - beforeSelection = Array(0..= 0 ? Story(index: selectedIndex) : nil - after = afterSelection.map { Story(index: $0) } - - print("🪿 Reload: \(before.count) before, \(selected == nil ? "none" : selected!.debugTitle) selected, \(after.count) after, took \(-debug.timeIntervalSinceNow) seconds") - - -// -// #warning("hack") -// -// print("🪿 ... count: \(storyCount), index: \(selectedIndex)") -// print("🪿 ... before: \(before)") -// print("🪿 ... selection: \(selected == nil ? "none" : selected!.debugTitle)") -// print("🪿 ... 
after: \(after)") - - - - } - - func reload(story: Story) { - if story == selected { - selected = Story(index: story.index) - } else if let index = before.firstIndex(of: story) { - before[index] = Story(index: story.index) - } else if let index = after.firstIndex(of: story) { - after[index] = Story(index: story.index) - } - } -} - -class StorySettings { - let defaults = UserDefaults.standard - - enum Content: String, RawRepresentable { - case title - case short - case medium - case long - - static let titleLimit = 6 - - static let contentLimit = 10 - - var limit: Int { - switch self { - case .title: - return 6 - case .short: - return 2 - case .medium: - return 4 - case .long: - return 6 - } - } - } - - var content: Content { - if let string = defaults.string(forKey: "story_list_preview_text_size"), let value = Content(rawValue: string) { - return value - } else { - return .short - } - } - - enum Preview: String, RawRepresentable { - case none - case smallLeft = "small_left" - case largeLeft = "large_left" - case largeRight = "large_right" - case smallRight = "small_right" - - var isLeft: Bool { - return [.smallLeft, .largeLeft].contains(self) - } - - var isSmall: Bool { - return [.smallLeft, .smallRight].contains(self) - } - } - - var preview: Preview { - if let string = defaults.string(forKey: "story_list_preview_images_size"), let value = Preview(rawValue: string) { - return value - } else { - return .smallRight - } - } - - enum FontSize: String, RawRepresentable { - case xs - case small - case medium - case large - case xl - - var offset: CGFloat { - switch self { - case .xs: - return -2 - case .small: - return -1 - case .medium: - return 0 - case .large: - return 1 - case .xl: - return 2 - } - } - } - - var fontSize: FontSize { - if let string = defaults.string(forKey: "feed_list_font_size"), let value = FontSize(rawValue: string) { - return value - } else { - return .medium - } - } - - enum Spacing: String, RawRepresentable { - case compact - case comfortable - } - - var spacing: Spacing { - if let string = defaults.string(forKey: "feed_list_spacing"), let value = Spacing(rawValue: string) { - return value - } else { - return .comfortable - } - } - - var gridColumns: Int { - guard let pref = UserDefaults.standard.string(forKey: "grid_columns"), let columns = Int(pref) else { - if NewsBlurAppDelegate.shared.isCompactWidth { - return 1 - } else if NewsBlurAppDelegate.shared.isPortrait || NewsBlurAppDelegate.shared.isPhone { - return 2 - } else { - return 4 - } - } - - if NewsBlurAppDelegate.shared.isPortrait, columns > 3 { - return 3 - } - - return columns - } - - var gridHeight: CGFloat { - guard let pref = UserDefaults.standard.string(forKey: "grid_height") else { - return 400 - } - - switch pref { - case "xs": - return 250 - case "short": - return 300 - case "tall": - return 500 - case "xl": - return 600 - default: - return 400 - } + return "Story #\(index) \"\(title)\" in \(feed?.name ?? "")" } } diff --git a/clients/ios/Classes/StoryCache.swift b/clients/ios/Classes/StoryCache.swift new file mode 100644 index 0000000000..9b30139f86 --- /dev/null +++ b/clients/ios/Classes/StoryCache.swift @@ -0,0 +1,114 @@ +// +// StoryCache.swift +// NewsBlur +// +// Created by David Sinclair on 2024-04-04. +// Copyright © 2024 NewsBlur. All rights reserved. +// + +import Foundation + +// The Feed, Story, and StoryCache classes could be quite useful going forward; Rather than calling getStory() to get the dictionary, could have a variation that returns a Story instance. 
Could fetch from the cache if available, or make and cache one from the dictionary. Would need to remove it from the cache when changing anything about a story. Could perhaps make the cache part of StoriesCollection.
+
+/// A cache of stories for the feed detail grid view.
+class StoryCache: ObservableObject {
+    let appDelegate = NewsBlurAppDelegate.shared!
+    
+    let settings = StorySettings()
+    
+    var isDarkTheme: Bool {
+        return ThemeManager.shared.isDarkTheme
+    }
+    
+    var isGrid: Bool {
+        return appDelegate.detailViewController.layout == .grid
+    }
+    
+    var isPhone: Bool {
+        return appDelegate.detailViewController.isPhone
+    }
+    
+    var canPullToRefresh: Bool {
+        return appDelegate.feedDetailViewController.canPullToRefresh
+    }
+    
+    @Published var before = [Story]()
+    @Published var selected: Story?
+    @Published var after = [Story]()
+    
+    var all: [Story] {
+        if let selected {
+            return before + [selected] + after
+        } else {
+            return before + after
+        }
+    }
+    
+    func story(with index: Int) -> Story? {
+        return all.first(where: { $0.index == index } )
+    }
+    
+    static var feeds = [String : Feed]()
+    
+    var currentFeed: Feed?
+    
+    func reload() {
+        let debug = Date()
+        let storyCount = Int(appDelegate.storiesCollection.storyLocationsCount)
+        var beforeSelection = [Int]()
+        var selectedIndex = -999
+        var afterSelection = [Int]()
+        
+        if storyCount > 0 {
+            selectedIndex = appDelegate.storiesCollection.locationOfActiveStory()
+            
+            if selectedIndex < 0 {
+                beforeSelection = Array(0..<storyCount)
+            } else {
+                beforeSelection = Array(0..<selectedIndex)
+                
+                if selectedIndex + 1 < storyCount {
+                    afterSelection = Array((selectedIndex + 1)..<storyCount)
+                }
+            }
+        }
+        
+        before = beforeSelection.map { Story(index: $0) }
+        selected = selectedIndex >= 0 ? Story(index: selectedIndex) : nil
+        after = afterSelection.map { Story(index: $0) }
+        
+        print("🪿 Reload: \(before.count) before, \(selected == nil ? "none" : selected!.debugTitle) selected, \(after.count) after, took \(-debug.timeIntervalSinceNow) seconds")
+        
+        
+        //
+        //        #warning("hack")
+        //
+        //        print("🪿 ... count: \(storyCount), index: \(selectedIndex)")
+        //        print("🪿 ... before: \(before)")
+        //        print("🪿 ... selection: \(selected == nil ? "none" : selected!.debugTitle)")
+        //        print("🪿 ... after: \(after)")
+        
+        
+        
+    }
+    
+    func reload(story: Story) {
+        if story == selected {
+            selected = Story(index: story.index)
+        } else if let index = before.firstIndex(of: story) {
+            before[index] = Story(index: story.index)
+        } else if let index = after.firstIndex(of: story) {
+            after[index] = Story(index: story.index)
+        }
+    }
+}
diff --git a/clients/ios/Classes/StorySettings.swift b/clients/ios/Classes/StorySettings.swift
new file mode 100644
index 0000000000..f318b18100
--- /dev/null
+++ b/clients/ios/Classes/StorySettings.swift
@@ -0,0 +1,150 @@
+//
+//  StorySettings.swift
+//  NewsBlur
+//
+//  Created by David Sinclair on 2024-04-04.
+//  Copyright © 2024 NewsBlur. All rights reserved.
+//
+
+import Foundation
+
+class StorySettings {
+    let defaults = UserDefaults.standard
+    
+    enum Content: String, RawRepresentable {
+        case title
+        case short
+        case medium
+        case long
+        
+        static let titleLimit = 6
+        
+        static let contentLimit = 10
+        
+        var limit: Int {
+            switch self {
+            case .title:
+                return 6
+            case .short:
+                return 2
+            case .medium:
+                return 4
+            case .long:
+                return 6
+            }
+        }
+    }
+    
+    var content: Content {
+        if let string = defaults.string(forKey: "story_list_preview_text_size"), let value = Content(rawValue: string) {
+            return value
+        } else {
+            return .short
+        }
+    }
+    
+    enum Preview: String, RawRepresentable {
+        case none
+        case smallLeft = "small_left"
+        case largeLeft = "large_left"
+        case largeRight = "large_right"
+        case smallRight = "small_right"
+        
+        var isLeft: Bool {
+            return [.smallLeft, .largeLeft].contains(self)
+        }
+        
+        var isSmall: Bool {
+            return [.smallLeft, .smallRight].contains(self)
+        }
+    }
+    
+    var preview: Preview {
+        if let string = defaults.string(forKey: "story_list_preview_images_size"), let value = Preview(rawValue: string) {
+            return value
+        } else {
+            return .smallRight
+        }
+    }
+    
+    enum FontSize: String, RawRepresentable {
+        case xs
+        case small
+        case medium
+        case large
+        case xl
+        
+        var offset: CGFloat {
+            switch self {
+            case .xs:
+                return -2
+            case .small:
+                return -1
+            case .medium:
+                return 0
+            case .large:
+                return 1
+            case .xl:
+                return 2
+            }
+        }
+    }
+    
+    var fontSize: FontSize {
+        if let string = defaults.string(forKey: "feed_list_font_size"), let value = FontSize(rawValue: string) {
+            return value
+        } else {
+            return .medium
+        }
+    }
+    
+    enum Spacing: String, RawRepresentable {
+        case compact
+        case comfortable
+    }
+    
+    var spacing: Spacing {
+        if let string = defaults.string(forKey: "feed_list_spacing"), let value = Spacing(rawValue: string) {
+            return value
+        } else {
+            return .comfortable
+        }
+    }
+    
+    var gridColumns: Int {
+        guard let pref = UserDefaults.standard.string(forKey: "grid_columns"), let columns = Int(pref) else {
+            if NewsBlurAppDelegate.shared.isCompactWidth {
+                return 1
+            } else if NewsBlurAppDelegate.shared.isPortrait || NewsBlurAppDelegate.shared.isPhone {
+                return 2
+            } else {
+                return 4
+            }
+        }
+        
+        if NewsBlurAppDelegate.shared.isPortrait, columns > 3 {
+            return 3
+        }
+        
+        return columns
+    }
+    
+    var gridHeight: CGFloat {
+        guard let pref = UserDefaults.standard.string(forKey: "grid_height") else {
+            return 400
+        }
+        
+        switch pref {
+        case "xs":
+            return 250
+        case "short":
+            return 300
+        case "tall":
+            return 500
+        case "xl":
+            return 600
+        default:
+            return 400
+        }
+    }
+}
diff --git a/clients/ios/Classes/SwiftUIUtilities.swift b/clients/ios/Classes/SwiftUIUtilities.swift
index 6ef9e39830..c191dbc954 100644
--- a/clients/ios/Classes/SwiftUIUtilities.swift
+++ b/clients/ios/Classes/SwiftUIUtilities.swift
@@ -38,6 +38,16 @@ extension View {
     }
 }
 
+extension Text {
+    func colored(_ color: Color) -> Text {
+        if #available(iOS 17.0, *) {
+            self.foregroundStyle(color)
+        } else {
+            self.foregroundColor(color)
+        }
+    }
+}
+
 struct RoundedCorner: Shape {
     var radius: CGFloat = .infinity
     var corners: UIRectCorner = .allCorners
@@ -131,3 +141,70 @@ struct OffsetObservingScrollView: View {
         .coordinateSpace(name: coordinateSpaceName)
     }
 }
+
+struct WrappingHStack<Model, V>: View where Model: Hashable, V: View {
+    typealias ViewGenerator = (Model) -> V
+    
+    var models: [Model]
+    var horizontalSpacing: CGFloat = 2
+    var verticalSpacing: CGFloat = 0
+    var viewGenerator: ViewGenerator
+    
+    @State private var totalHeight
for ScrollView/List + // = CGFloat.infinity // << variant for VStack + + var body: some View { + VStack { + GeometryReader { geometry in + self.generateContent(in: geometry) + } + } + .frame(height: totalHeight)// << variant for ScrollView/List + //.frame(maxHeight: totalHeight) // << variant for VStack + } + + private func generateContent(in geometry: GeometryProxy) -> some View { + var width = CGFloat.zero + var height = CGFloat.zero + + return ZStack(alignment: .topLeading) { + ForEach(self.models, id: \.self) { models in + viewGenerator(models) + .padding(.horizontal, horizontalSpacing) + .padding(.vertical, verticalSpacing) + .alignmentGuide(.leading, computeValue: { dimension in + if (abs(width - dimension.width) > geometry.size.width) + { + width = 0 + height -= dimension.height + } + let result = width + if models == self.models.last! { + width = 0 //last item + } else { + width -= dimension.width + } + return result + }) + .alignmentGuide(.top, computeValue: {dimension in + let result = height + if models == self.models.last! { + height = 0 // last item + } + return result + }) + } + }.background(viewHeightReader($totalHeight)) + } + + private func viewHeightReader(_ binding: Binding) -> some View { + return GeometryReader { geometry -> Color in + let rect = geometry.frame(in: .local) + DispatchQueue.main.async { + binding.wrappedValue = rect.size.height + } + return .clear + } + } +} diff --git a/clients/ios/Classes/TrainerCapsule.swift b/clients/ios/Classes/TrainerCapsule.swift new file mode 100644 index 0000000000..e8f3eb02e9 --- /dev/null +++ b/clients/ios/Classes/TrainerCapsule.swift @@ -0,0 +1,67 @@ +// +// TrainerCapsule.swift +// NewsBlur +// +// Created by David Sinclair on 2024-04-02. +// Copyright © 2024 NewsBlur. All rights reserved. +// + +import SwiftUI + +struct TrainerCapsule: View { + var score: Feed.Score + + var header: String + + var image: UIImage? + + var value: String + + var count: Int = 0 + + var body: some View { + HStack { + HStack { + Image(systemName: score.imageName) + .foregroundColor(.white) + + content + } + .padding([.top, .bottom], 5) + .padding([.leading, .trailing], 10) + .background(score == .like ? Color(red: 0, green: 0.5, blue: 0.0) : score == .dislike ? Color.red : Color(white: ThemeManager.shared.isSystemDark ? 0.35 : 0.6)) + .clipShape(Capsule()) + + if count > 0 { + Text("x \(count)") + .colored(.gray) + .padding([.trailing], 10) + } + } + } + + var content: Text { + Text("\(Text("\(header):").colored(.init(white: 0.85))) \(imageText)\(value)") + .colored(.white) + } + + var imageText: Text { + if let image { + Text(Image(uiImage: image)).baselineOffset(-3) + Text(" ") + } else { + Text("") + } + } +} + +#Preview { + TrainerCapsule(score: .none, header: "Tag", value: "None Example") +} + +#Preview { + TrainerCapsule(score: .like, header: "Tag", value: "Liked Example") +} + +#Preview { + TrainerCapsule(score: .dislike, header: "Tag", value: "Disliked Example") +} diff --git a/clients/ios/Classes/TrainerView.swift b/clients/ios/Classes/TrainerView.swift new file mode 100644 index 0000000000..7fb0c7110a --- /dev/null +++ b/clients/ios/Classes/TrainerView.swift @@ -0,0 +1,214 @@ +// +// TrainerView.swift +// NewsBlur +// +// Created by David Sinclair on 2024-04-02. +// Copyright © 2024 NewsBlur. All rights reserved. +// + +import SwiftUI + +/// A protocol of interaction between the trainer view and the enclosing view controller. 
+protocol TrainerInteraction { + var isStoryTrainer: Bool { get set } +} + +struct TrainerView: View { + var interaction: TrainerInteraction + + @ObservedObject var cache: StoryCache + + let columns = [GridItem(.adaptive(minimum: 50))] + + var body: some View { + VStack(alignment: .leading) { + Text("What do you 👠\(Text("like").colored(.green)) and 👎 \(Text("dislike").colored(.red)) about this \(feedOrStoryLowercase)?") + .font(font(named: "WhitneySSm-Medium", size: 16)) + .padding() + + List { + Section(content: { + VStack(alignment: .leading) { + if interaction.isStoryTrainer { + Text("Choose one or more words from the title:") + .font(font(named: "WhitneySSm-Medium", size: 12)) + .padding([.top], 10) + + WrappingHStack(models: titleWords, horizontalSpacing: 1) { word in + Button(action: { + if addingTitle.isEmpty { + addingTitle = word + } else { + addingTitle.append(" \(word)") + } + }, label: { + TrainerWord(word: word) + }) + .buttonStyle(BorderlessButtonStyle()) + .padding([.top, .bottom], 5) + } + + if !addingTitle.isEmpty { + HStack { + Button(action: { + cache.appDelegate.toggleTitleClassifier(addingTitle, feedId: cache.currentFeed?.id, score: 0) + addingTitle = "" + }, label: { + TrainerCapsule(score: .none, header: "Title", value: addingTitle) + }) + .buttonStyle(BorderlessButtonStyle()) + .padding([.top, .bottom], 5) + + Button { + addingTitle = "" + } label: { + Image(systemName: "xmark.circle.fill") + .imageScale(.large) + .foregroundColor(.gray) + } + } + } + } + + WrappingHStack(models: titles) { title in + Button(action: { + cache.appDelegate.toggleTitleClassifier(title.name, feedId: cache.currentFeed?.id, score: 0) + }, label: { + TrainerCapsule(score: title.score, header: "Title", value: title.name, count: title.count) + }) + .buttonStyle(BorderlessButtonStyle()) + .padding([.top, .bottom], 5) + } + } + }, header: { + header(story: "Story Title", feed: "Titles & Phrases") + }) + + Section(content: { + WrappingHStack(models: authors) { author in + Button(action: { + cache.appDelegate.toggleAuthorClassifier(author.name, feedId: cache.currentFeed?.id) + }, label: { + TrainerCapsule(score: author.score, header: "Author", value: author.name, count: author.count) + }) + .buttonStyle(BorderlessButtonStyle()) + .padding([.top, .bottom], 5) + } + }, header: { + header(story: "Story Author", feed: "Authors") + }) + + Section(content: { + WrappingHStack(models: tags) { tag in + Button(action: { + cache.appDelegate.toggleTagClassifier(tag.name, feedId: cache.currentFeed?.id) + }, label: { + TrainerCapsule(score: tag.score, header: "Tag", value: tag.name, count: tag.count) + }) + .buttonStyle(BorderlessButtonStyle()) + .padding([.top, .bottom], 5) + } + }, header: { + header(story: "Story Categories & Tags", feed: "Categories & Tags") + }) + + Section(content: { + HStack { + if let feed = cache.currentFeed { + Button(action: { + cache.appDelegate.toggleFeedClassifier(feed.id) + }, label: { + TrainerCapsule(score: score(key: "feeds", value: feed.id), header: "Site", image: feed.image, value: feed.name) + }) + .buttonStyle(BorderlessButtonStyle()) + .padding([.top, .bottom], 5) + } + } + }, header: { + header(feed: "Everything by This Publisher") + }) + } + .font(font(named: "WhitneySSm-Medium", size: 12)) + } + .onAppear { + addingTitle = "" + } + } + + func font(named: String, size: CGFloat) -> Font { + return Font.custom(named, size: size + cache.settings.fontSize.offset, relativeTo: .caption) + } + + func reload() { + cache.reload() + addingTitle = "" + } + + var 
feedOrStoryLowercase: String { + return interaction.isStoryTrainer ? "story" : "site" + } + + @ViewBuilder + func header(story: String? = nil, feed: String) -> some View { + if let story { + Text(interaction.isStoryTrainer ? story : feed) + .font(font(named: "WhitneySSm-Medium", size: 16)) + } else { + Text(feed) + .font(font(named: "WhitneySSm-Medium", size: 16)) + } + } + + func score(key: String, value: String) -> Feed.Score { + guard let classifiers = cache.currentFeed?.classifiers(for: key), + let score = classifiers[value] as? Int else { + return .none + } + + if score > 0 { + return .like + } else if score < 0 { + return .dislike + } else { + return .none + } + } + + var titleWords: [String] { + if interaction.isStoryTrainer, let story = cache.selected { + return story.title.components(separatedBy: .whitespaces) + } else { + return [] + } + } + + @State private var addingTitle = "" + + var titles: [Feed.Training] { + if interaction.isStoryTrainer { + return cache.selected?.titles ?? [] + } else { + return cache.currentFeed?.titles ?? [] + } + } + + var authors: [Feed.Training] { + if interaction.isStoryTrainer { + return cache.selected?.authors ?? [] + } else { + return cache.currentFeed?.authors ?? [] + } + } + + var tags: [Feed.Training] { + if interaction.isStoryTrainer { + return cache.selected?.tags ?? [] + } else { + return cache.currentFeed?.tags ?? [] + } + } +} + +//#Preview { +// TrainerViewController() +//} diff --git a/clients/ios/Classes/TrainerViewController.h b/clients/ios/Classes/TrainerViewController.h index d9708988bf..7e5b425897 100644 --- a/clients/ios/Classes/TrainerViewController.h +++ b/clients/ios/Classes/TrainerViewController.h @@ -6,6 +6,10 @@ // Copyright (c) 2012 NewsBlur. All rights reserved. // + +#warning This code is obsolete, and will be removed once the SwiftUI implementation is complete. + + #import #import "BaseViewController.h" #import "NewsBlurAppDelegate.h" @@ -21,7 +25,7 @@ @end -@interface TrainerViewController : BaseViewController { +@interface OldTrainerViewController : BaseViewController { IBOutlet UIBarButtonItem * closeButton; TrainerWebView *webView; IBOutlet UINavigationBar *navBar; diff --git a/clients/ios/Classes/TrainerViewController.m b/clients/ios/Classes/TrainerViewController.m index 628f3ead77..819a865722 100644 --- a/clients/ios/Classes/TrainerViewController.m +++ b/clients/ios/Classes/TrainerViewController.m @@ -6,13 +6,17 @@ // Copyright (c) 2012 NewsBlur. All rights reserved. // + +#warning This code is obsolete, and will be removed once the SwiftUI implementation is complete. 
+
+
 #import "TrainerViewController.h"
 #import "StringHelper.h"
 #import "Utilities.h"
 #import "AFNetworking.h"
 #import "StoriesCollection.h"
 
-@implementation TrainerViewController
+@implementation OldTrainerViewController
 
 @synthesize closeButton;
 @synthesize webView;
@@ -543,7 +547,7 @@ - (NSString *)makeClassifier:(NSString *)classifierName withType:(NSString *)cla
 
 - (IBAction)doCloseDialog:(id)sender {
     [appDelegate hidePopover];
-    [appDelegate.trainerViewController dismissViewControllerAnimated:YES completion:nil];
+//    [appDelegate.trainerViewController dismissViewControllerAnimated:YES completion:nil];
 }
 
 - (void)changeTitle:(id)sender score:(int)score {
@@ -604,12 +608,12 @@ - (BOOL)canPerformAction:(SEL)action withSender:(id)sender {
 
 - (void)focusTitle:(id)sender {
     NewsBlurAppDelegate *appDelegate = [NewsBlurAppDelegate sharedAppDelegate];
-    [appDelegate.trainerViewController changeTitle:sender score:1];
+//    [appDelegate.trainerViewController changeTitle:sender score:1];
 }
 
 - (void)hideTitle:(id)sender {
     NewsBlurAppDelegate *appDelegate = [NewsBlurAppDelegate sharedAppDelegate];
-    [appDelegate.trainerViewController changeTitle:sender score:-1];
+//    [appDelegate.trainerViewController changeTitle:sender score:-1];
 }
 
 // Work around iOS 9 issue where menu doesn't appear the first time
diff --git a/clients/ios/Classes/TrainerViewController.swift b/clients/ios/Classes/TrainerViewController.swift
new file mode 100644
index 0000000000..df551e099b
--- /dev/null
+++ b/clients/ios/Classes/TrainerViewController.swift
@@ -0,0 +1,58 @@
+//
+//  TrainerViewController.swift
+//  NewsBlur
+//
+//  Created by David Sinclair on 2024-04-01.
+//  Copyright © 2024 NewsBlur. All rights reserved.
+//
+
+import SwiftUI
+
+@objc class TrainerViewController: BaseViewController {
+    @objc var isStoryTrainer = false
+    
+    @objc var isFeedLoaded = false
+    
+    lazy var hostingController = makeHostingController()
+    
+    var trainerView: TrainerView {
+        return hostingController.rootView
+    }
+    
+    var storyCache: StoryCache {
+        return appDelegate.feedDetailViewController.storyCache
+    }
+    
+    private func makeHostingController() -> UIHostingController<TrainerView> {
+        let trainerView = TrainerView(interaction: self, cache: storyCache)
+        let trainerController = UIHostingController(rootView: trainerView)
+        trainerController.view.translatesAutoresizingMaskIntoConstraints = false
+        
+        return trainerController
+    }
+    
+    override func viewDidLoad() {
+        super.viewDidLoad()
+        
+        addChild(hostingController)
+        view.addSubview(hostingController.view)
+        hostingController.didMove(toParent: self)
+        
+        NSLayoutConstraint.activate([
+            hostingController.view.topAnchor.constraint(equalTo: view.topAnchor),
+            hostingController.view.leadingAnchor.constraint(equalTo: view.leadingAnchor),
+            hostingController.view.trailingAnchor.constraint(equalTo: view.trailingAnchor),
+            hostingController.view.bottomAnchor.constraint(equalTo: view.safeAreaLayoutGuide.bottomAnchor)
+        ])
+        
+//        changedLayout()
+    }
+    
+    @objc func reload() {
+        trainerView.reload()
+    }
+}
+
+extension TrainerViewController: TrainerInteraction {
+    //TODO: 🚧
+}
diff --git a/clients/ios/Classes/TrainerWord.swift b/clients/ios/Classes/TrainerWord.swift
new file mode 100644
index 0000000000..b5309a0e78
--- /dev/null
+++ b/clients/ios/Classes/TrainerWord.swift
@@ -0,0 +1,28 @@
+//
+//  TrainerWord.swift
+//  NewsBlur
+//
+//  Created by David Sinclair on 2024-04-03.
+//  Copyright © 2024 NewsBlur. All rights reserved.
+// + +import SwiftUI + +struct TrainerWord: View { + var word: String + + var body: some View { + HStack { + Text(word) + .colored(Color(white: ThemeManager.shared.isSystemDark ? 0.8 : 0.1)) + .padding([.top, .bottom], 1) + .padding([.leading, .trailing], 1) + .background(Color(white: ThemeManager.shared.isSystemDark ? 0.35 : 0.95)) + .clipShape(RoundedRectangle(cornerRadius: 5)) + } + } +} + +#Preview { + TrainerWord(word: "Example") +} diff --git a/clients/ios/NewsBlur.xcodeproj/project.pbxproj b/clients/ios/NewsBlur.xcodeproj/project.pbxproj index 3b57a7fbb8..df7a65f022 100755 --- a/clients/ios/NewsBlur.xcodeproj/project.pbxproj +++ b/clients/ios/NewsBlur.xcodeproj/project.pbxproj @@ -20,6 +20,10 @@ 17150E1E2B05775A004D5309 /* SceneDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 17150E1D2B05775A004D5309 /* SceneDelegate.swift */; }; 17150E1F2B05775A004D5309 /* SceneDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 17150E1D2B05775A004D5309 /* SceneDelegate.swift */; }; 1715D02B2166B3F900227731 /* PremiumManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 1715D02A2166B3F900227731 /* PremiumManager.m */; }; + 171904B52BBC8D4E004CCC96 /* TrainerView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 171904B42BBC8D4E004CCC96 /* TrainerView.swift */; }; + 171904B62BBC8D4E004CCC96 /* TrainerView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 171904B42BBC8D4E004CCC96 /* TrainerView.swift */; }; + 171904B82BBCA712004CCC96 /* TrainerCapsule.swift in Sources */ = {isa = PBXBuildFile; fileRef = 171904B72BBCA712004CCC96 /* TrainerCapsule.swift */; }; + 171904B92BBCA712004CCC96 /* TrainerCapsule.swift in Sources */ = {isa = PBXBuildFile; fileRef = 171904B72BBCA712004CCC96 /* TrainerCapsule.swift */; }; 171B6FFD25C4C7C8008638A9 /* StoryPagesViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 171B6FFC25C4C7C8008638A9 /* StoryPagesViewController.swift */; }; 1721C9D12497F91A00B0EDC4 /* mute_gray.png in Resources */ = {isa = PBXBuildFile; fileRef = 1721C9D02497F91900B0EDC4 /* mute_gray.png */; }; 1723388B26BE43EB00610784 /* WidgetLoader.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1723388A26BE43EB00610784 /* WidgetLoader.swift */; }; @@ -710,6 +714,8 @@ 175792DA2930605500490924 /* CFNetwork.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 78095E3E128EF35400230C8E /* CFNetwork.framework */; }; 175792E72930611C00490924 /* LaunchScreenDev.xib in Resources */ = {isa = PBXBuildFile; fileRef = 175792E62930611B00490924 /* LaunchScreenDev.xib */; }; 175792E92930617600490924 /* logo_newsblur_512-dev.png in Resources */ = {isa = PBXBuildFile; fileRef = 175792E82930617600490924 /* logo_newsblur_512-dev.png */; }; + 175DC6AF2BBB87D200B3708F /* TrainerViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 175DC6AE2BBB87D200B3708F /* TrainerViewController.swift */; }; + 175DC6B02BBB87D200B3708F /* TrainerViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 175DC6AE2BBB87D200B3708F /* TrainerViewController.swift */; }; 175FAC4C23AB34EB002AC38C /* menu_icn_widget.png in Resources */ = {isa = PBXBuildFile; fileRef = 175FAC4A23AB34EB002AC38C /* menu_icn_widget.png */; }; 175FAC4D23AB34EB002AC38C /* menu_icn_widget@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 175FAC4B23AB34EB002AC38C /* menu_icn_widget@2x.png */; }; 176129601C630AEB00702FE4 /* mute_feed_off.png in Resources */ = {isa = PBXBuildFile; fileRef = 1761295C1C630AEB00702FE4 /* mute_feed_off.png */; }; @@ -772,6 +778,14 @@ 17B14BDD23E24B4E00CF8D2C /* 
menu_icn_statistics@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 17B14BDB23E24B4E00CF8D2C /* menu_icn_statistics@2x.png */; }; 17B33D1827D97282009108AD /* g_icn_folder_widget.png in Resources */ = {isa = PBXBuildFile; fileRef = 17B33D1627D97281009108AD /* g_icn_folder_widget.png */; }; 17B33D1927D97282009108AD /* g_icn_folder_widget@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 17B33D1727D97282009108AD /* g_icn_folder_widget@2x.png */; }; + 17BC56A72BBE4A5600A30C41 /* TrainerWord.swift in Sources */ = {isa = PBXBuildFile; fileRef = 17BC56A62BBE4A5600A30C41 /* TrainerWord.swift */; }; + 17BC56A82BBE4A5600A30C41 /* TrainerWord.swift in Sources */ = {isa = PBXBuildFile; fileRef = 17BC56A62BBE4A5600A30C41 /* TrainerWord.swift */; }; + 17BC56AA2BBF6BC000A30C41 /* Feed.swift in Sources */ = {isa = PBXBuildFile; fileRef = 17BC56A92BBF6BC000A30C41 /* Feed.swift */; }; + 17BC56AB2BBF6BC000A30C41 /* Feed.swift in Sources */ = {isa = PBXBuildFile; fileRef = 17BC56A92BBF6BC000A30C41 /* Feed.swift */; }; + 17BC56AD2BBF6C0000A30C41 /* StoryCache.swift in Sources */ = {isa = PBXBuildFile; fileRef = 17BC56AC2BBF6C0000A30C41 /* StoryCache.swift */; }; + 17BC56AE2BBF6C0000A30C41 /* StoryCache.swift in Sources */ = {isa = PBXBuildFile; fileRef = 17BC56AC2BBF6C0000A30C41 /* StoryCache.swift */; }; + 17BC56B02BBF6C2200A30C41 /* StorySettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = 17BC56AF2BBF6C2200A30C41 /* StorySettings.swift */; }; + 17BC56B12BBF6C2200A30C41 /* StorySettings.swift in Sources */ = {isa = PBXBuildFile; fileRef = 17BC56AF2BBF6C2200A30C41 /* StorySettings.swift */; }; 17BD3BA52271102500F615EC /* Intents.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 17BD3BA42271102500F615EC /* Intents.framework */; settings = {ATTRIBUTES = (Weak, ); }; }; 17BD3BA72271122800F615EC /* CoreSpotlight.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 17BD3BA62271122800F615EC /* CoreSpotlight.framework */; }; 17BD3BA92271125400F615EC /* CoreServices.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 17BD3BA82271125400F615EC /* CoreServices.framework */; }; @@ -1440,6 +1454,8 @@ 17150E1D2B05775A004D5309 /* SceneDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SceneDelegate.swift; sourceTree = ""; }; 1715D0292166B3F900227731 /* PremiumManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = PremiumManager.h; sourceTree = ""; }; 1715D02A2166B3F900227731 /* PremiumManager.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = PremiumManager.m; sourceTree = ""; }; + 171904B42BBC8D4E004CCC96 /* TrainerView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TrainerView.swift; sourceTree = ""; }; + 171904B72BBCA712004CCC96 /* TrainerCapsule.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TrainerCapsule.swift; sourceTree = ""; }; 171B6FFC25C4C7C8008638A9 /* StoryPagesViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = StoryPagesViewController.swift; sourceTree = ""; }; 1721C9D02497F91900B0EDC4 /* mute_gray.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = mute_gray.png; sourceTree = ""; }; 1723388A26BE43EB00610784 /* WidgetLoader.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WidgetLoader.swift; sourceTree = ""; }; @@ -1506,6 +1522,7 @@ 175792E42930605500490924 /* NB Alpha.app */ = {isa = 
PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "NB Alpha.app"; sourceTree = BUILT_PRODUCTS_DIR; }; 175792E62930611B00490924 /* LaunchScreenDev.xib */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.xib; path = LaunchScreenDev.xib; sourceTree = ""; }; 175792E82930617600490924 /* logo_newsblur_512-dev.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "logo_newsblur_512-dev.png"; sourceTree = ""; }; + 175DC6AE2BBB87D200B3708F /* TrainerViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TrainerViewController.swift; sourceTree = ""; }; 175FAC4A23AB34EB002AC38C /* menu_icn_widget.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = menu_icn_widget.png; sourceTree = ""; }; 175FAC4B23AB34EB002AC38C /* menu_icn_widget@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "menu_icn_widget@2x.png"; sourceTree = ""; }; 1761295C1C630AEB00702FE4 /* mute_feed_off.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = mute_feed_off.png; sourceTree = ""; }; @@ -1556,6 +1573,10 @@ 17B33D1627D97281009108AD /* g_icn_folder_widget.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = g_icn_folder_widget.png; sourceTree = ""; }; 17B33D1727D97282009108AD /* g_icn_folder_widget@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "g_icn_folder_widget@2x.png"; sourceTree = ""; }; 17B96BE624304F72009A8EED /* Story Notification Service Extension.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = "Story Notification Service Extension.entitlements"; sourceTree = ""; }; + 17BC56A62BBE4A5600A30C41 /* TrainerWord.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TrainerWord.swift; sourceTree = ""; }; + 17BC56A92BBF6BC000A30C41 /* Feed.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Feed.swift; sourceTree = ""; }; + 17BC56AC2BBF6C0000A30C41 /* StoryCache.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StoryCache.swift; sourceTree = ""; }; + 17BC56AF2BBF6C2200A30C41 /* StorySettings.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StorySettings.swift; sourceTree = ""; }; 17BD3BA42271102500F615EC /* Intents.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Intents.framework; path = System/Library/Frameworks/Intents.framework; sourceTree = SDKROOT; }; 17BD3BA62271122800F615EC /* CoreSpotlight.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreSpotlight.framework; path = System/Library/Frameworks/CoreSpotlight.framework; sourceTree = SDKROOT; }; 17BD3BA82271125400F615EC /* CoreServices.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreServices.framework; path = System/Library/Frameworks/CoreServices.framework; sourceTree = SDKROOT; }; @@ -2826,6 +2847,10 @@ 17EB505F1BE46A900021358B /* FontListViewController.m */, 78095EC6128F30B500230C8E /* OriginalStoryViewController.h */, 78095EC7128F30B500230C8E /* OriginalStoryViewController.m */, + 175DC6AE2BBB87D200B3708F /* TrainerViewController.swift */, + 171904B42BBC8D4E004CCC96 /* TrainerView.swift */, + 171904B72BBCA712004CCC96 /* TrainerCapsule.swift */, + 17BC56A62BBE4A5600A30C41 /* TrainerWord.swift */, FF67D3B0168924C40057A7DA /* TrainerViewController.h */, 
FF67D3B1168924C40057A7DA /* TrainerViewController.m */, FF6282131A11613900271FDB /* UserTagsViewController.h */, @@ -3287,7 +3312,10 @@ 43D8189F15B9404D00733444 /* Models */ = { isa = PBXGroup; children = ( + 17BC56A92BBF6BC000A30C41 /* Feed.swift */, 172ECBF6298B1239006371BC /* Story.swift */, + 17BC56AC2BBF6C0000A30C41 /* StoryCache.swift */, + 17BC56AF2BBF6C2200A30C41 /* StorySettings.swift */, ); name = Models; sourceTree = ""; @@ -5005,6 +5033,7 @@ 175792542930605500490924 /* IASKPSTextFieldSpecifierViewCell.m in Sources */, 175792552930605500490924 /* SBJson4StreamWriter.m in Sources */, 175792562930605500490924 /* FontSettingsViewController.m in Sources */, + 17BC56B12BBF6C2200A30C41 /* StorySettings.swift in Sources */, 175792572930605500490924 /* IASKAppSettingsViewController.m in Sources */, 175792582930605500490924 /* MarkReadMenuViewController.m in Sources */, 175792592930605500490924 /* SSWAnimator.m in Sources */, @@ -5026,11 +5055,14 @@ 175792692930605500490924 /* main.m in Sources */, 1757926A2930605500490924 /* MBProgressHUD.m in Sources */, 1757926B2930605500490924 /* NSString+HTML.m in Sources */, + 171904B92BBCA712004CCC96 /* TrainerCapsule.swift in Sources */, 1757926C2930605500490924 /* IASKSettingsReader.m in Sources */, 1757926D2930605500490924 /* UserTagsViewController.m in Sources */, 1757926E2930605500490924 /* StringHelper.m in Sources */, 1757926F2930605500490924 /* TransparentToolbar.m in Sources */, 179A88032B48E64A00916CF4 /* ToolbarDelegate.swift in Sources */, + 175DC6B02BBB87D200B3708F /* TrainerViewController.swift in Sources */, + 171904B62BBC8D4E004CCC96 /* TrainerView.swift in Sources */, 175792702930605500490924 /* THCircularProgressView.m in Sources */, 175792712930605500490924 /* IASKSpecifier.m in Sources */, 175792722930605500490924 /* UIView+ViewController.m in Sources */, @@ -5065,6 +5097,7 @@ 1757928C2930605500490924 /* PINMemoryCache.m in Sources */, 1757928D2930605500490924 /* SiteCell.m in Sources */, 1757928E2930605500490924 /* SloppySwiper.m in Sources */, + 17BC56A82BBE4A5600A30C41 /* TrainerWord.swift in Sources */, 1757928F2930605500490924 /* UIViewController+HidePopover.m in Sources */, 175792902930605500490924 /* FolderTitleView.m in Sources */, 175792912930605500490924 /* HorizontalPageDelegate.swift in Sources */, @@ -5103,6 +5136,7 @@ 175792B02930605500490924 /* FMResultSet.m in Sources */, 175792B12930605500490924 /* NBNotifier.m in Sources */, 175792B22930605500490924 /* TUSafariActivity.m in Sources */, + 17BC56AE2BBF6C0000A30C41 /* StoryCache.swift in Sources */, 175792B32930605500490924 /* PremiumViewController.m in Sources */, 175792B42930605500490924 /* NBLoadingCell.m in Sources */, 175792B52930605500490924 /* IASKTextViewCell.m in Sources */, @@ -5110,6 +5144,7 @@ 175792B72930605500490924 /* IASKTextView.m in Sources */, 175792B82930605500490924 /* OfflineSyncUnreads.m in Sources */, 175792B92930605500490924 /* IASKPSSliderSpecifierViewCell.m in Sources */, + 17BC56AB2BBF6BC000A30C41 /* Feed.swift in Sources */, 175792BA2930605500490924 /* OfflineFetchStories.m in Sources */, 175792BB2930605500490924 /* OfflineFetchText.m in Sources */, 175792BC2930605500490924 /* OfflineFetchImages.m in Sources */, @@ -5206,6 +5241,7 @@ FF34FD6B1E9D93CB0062F8ED /* IASKPSTextFieldSpecifierViewCell.m in Sources */, FF8D1ED01BAA311000725D8A /* SBJson4StreamWriter.m in Sources */, 43763AD1158F90B100B3DBE2 /* FontSettingsViewController.m in Sources */, + 17BC56B02BBF6C2200A30C41 /* StorySettings.swift in Sources */, FF34FD601E9D93CB0062F8ED 
/* IASKAppSettingsViewController.m in Sources */, 17CBD3BF1BF66B6C003FCCAE /* MarkReadMenuViewController.m in Sources */, FFA045B519CA49D700618DC4 /* SSWAnimator.m in Sources */, @@ -5227,11 +5263,14 @@ 43A4C3DC15B00966008787B5 /* main.m in Sources */, 43A4C3DD15B00966008787B5 /* MBProgressHUD.m in Sources */, 43A4C3E115B00966008787B5 /* NSString+HTML.m in Sources */, + 171904B82BBCA712004CCC96 /* TrainerCapsule.swift in Sources */, FF34FD641E9D93CB0062F8ED /* IASKSettingsReader.m in Sources */, FF6282151A11613900271FDB /* UserTagsViewController.m in Sources */, 43A4C3E315B00966008787B5 /* StringHelper.m in Sources */, 43A4C3E415B00966008787B5 /* TransparentToolbar.m in Sources */, 179A88022B48E64A00916CF4 /* ToolbarDelegate.swift in Sources */, + 175DC6AF2BBB87D200B3708F /* TrainerViewController.swift in Sources */, + 171904B52BBC8D4E004CCC96 /* TrainerView.swift in Sources */, FFD6604C1BACA45D006E4B8D /* THCircularProgressView.m in Sources */, FF34FD681E9D93CB0062F8ED /* IASKSpecifier.m in Sources */, FFA0484419CA73B700618DC4 /* UIView+ViewController.m in Sources */, @@ -5266,6 +5305,7 @@ FF2924E71E932D2900FCFA63 /* PINMemoryCache.m in Sources */, 43CE0F5F15DADB7F00608ED8 /* SiteCell.m in Sources */, FFA045B419CA49D700618DC4 /* SloppySwiper.m in Sources */, + 17BC56A72BBE4A5600A30C41 /* TrainerWord.swift in Sources */, E160F0571C9DAC2C00CB96DF /* UIViewController+HidePopover.m in Sources */, FFDE35CC161B8F870034BFDE /* FolderTitleView.m in Sources */, 172AD264251D901D000BB264 /* HorizontalPageDelegate.swift in Sources */, @@ -5304,6 +5344,7 @@ FF753CD3175858FC00344EC9 /* FMResultSet.m in Sources */, FF6618C8176184560039913B /* NBNotifier.m in Sources */, FF03AFF319F87F2E0063002A /* TUSafariActivity.m in Sources */, + 17BC56AD2BBF6C0000A30C41 /* StoryCache.swift in Sources */, FF83FF051FB52565008DAC0F /* PremiumViewController.m in Sources */, FF11045F176950F900502C29 /* NBLoadingCell.m in Sources */, FF34FD701E9D93CB0062F8ED /* IASKTextViewCell.m in Sources */, @@ -5311,6 +5352,7 @@ FF34FD6F1E9D93CB0062F8ED /* IASKTextView.m in Sources */, FF855B5B1794B0670098D48A /* OfflineSyncUnreads.m in Sources */, FF34FD6A1E9D93CB0062F8ED /* IASKPSSliderSpecifierViewCell.m in Sources */, + 17BC56AA2BBF6BC000A30C41 /* Feed.swift in Sources */, FF855B5E1794B0760098D48A /* OfflineFetchStories.m in Sources */, 17362ADD23639B4E00A0FCCC /* OfflineFetchText.m in Sources */, FF855B611794B0830098D48A /* OfflineFetchImages.m in Sources */, diff --git a/clients/ios/Resources-iPhone/TrainerViewController.xib b/clients/ios/Resources-iPhone/TrainerViewController.xib index 2dcced6c96..1030ffc963 100644 --- a/clients/ios/Resources-iPhone/TrainerViewController.xib +++ b/clients/ios/Resources-iPhone/TrainerViewController.xib @@ -1,43 +1,31 @@ - + - + - - - - - - - - - - - - - - - - - - + + + + + + From 2e351856295ac71350a3138bad5deabeb85216fa Mon Sep 17 00:00:00 2001 From: David Sinclair Date: Mon, 22 Apr 2024 15:59:59 -0400 Subject: [PATCH 28/69] Added privacy manifest --- clients/ios/NewsBlur.xcodeproj/project.pbxproj | 6 ++++++ clients/ios/Resources/Info.plist | 3 ++- clients/ios/Resources/PrivacyInfo.xcprivacy | 18 ++++++++++++++++++ 3 files changed, 26 insertions(+), 1 deletion(-) create mode 100644 clients/ios/Resources/PrivacyInfo.xcprivacy diff --git a/clients/ios/NewsBlur.xcodeproj/project.pbxproj b/clients/ios/NewsBlur.xcodeproj/project.pbxproj index df7a65f022..237982dc3f 100755 --- a/clients/ios/NewsBlur.xcodeproj/project.pbxproj +++ b/clients/ios/NewsBlur.xcodeproj/project.pbxproj @@ -20,6 +20,8 @@ 
17150E1E2B05775A004D5309 /* SceneDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 17150E1D2B05775A004D5309 /* SceneDelegate.swift */; }; 17150E1F2B05775A004D5309 /* SceneDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 17150E1D2B05775A004D5309 /* SceneDelegate.swift */; }; 1715D02B2166B3F900227731 /* PremiumManager.m in Sources */ = {isa = PBXBuildFile; fileRef = 1715D02A2166B3F900227731 /* PremiumManager.m */; }; + 17179E292BD6F86C006B18D5 /* PrivacyInfo.xcprivacy in Resources */ = {isa = PBXBuildFile; fileRef = 17179E282BD6F86C006B18D5 /* PrivacyInfo.xcprivacy */; }; + 17179E2A2BD6F86D006B18D5 /* PrivacyInfo.xcprivacy in Resources */ = {isa = PBXBuildFile; fileRef = 17179E282BD6F86C006B18D5 /* PrivacyInfo.xcprivacy */; }; 171904B52BBC8D4E004CCC96 /* TrainerView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 171904B42BBC8D4E004CCC96 /* TrainerView.swift */; }; 171904B62BBC8D4E004CCC96 /* TrainerView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 171904B42BBC8D4E004CCC96 /* TrainerView.swift */; }; 171904B82BBCA712004CCC96 /* TrainerCapsule.swift in Sources */ = {isa = PBXBuildFile; fileRef = 171904B72BBCA712004CCC96 /* TrainerCapsule.swift */; }; @@ -1454,6 +1456,7 @@ 17150E1D2B05775A004D5309 /* SceneDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SceneDelegate.swift; sourceTree = ""; }; 1715D0292166B3F900227731 /* PremiumManager.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = PremiumManager.h; sourceTree = ""; }; 1715D02A2166B3F900227731 /* PremiumManager.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = PremiumManager.m; sourceTree = ""; }; + 17179E282BD6F86C006B18D5 /* PrivacyInfo.xcprivacy */ = {isa = PBXFileReference; lastKnownFileType = text.xml; path = PrivacyInfo.xcprivacy; sourceTree = ""; }; 171904B42BBC8D4E004CCC96 /* TrainerView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TrainerView.swift; sourceTree = ""; }; 171904B72BBCA712004CCC96 /* TrainerCapsule.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TrainerCapsule.swift; sourceTree = ""; }; 171B6FFC25C4C7C8008638A9 /* StoryPagesViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = StoryPagesViewController.swift; sourceTree = ""; }; @@ -2722,6 +2725,7 @@ E1D123FD1C66753D00434F40 /* Localizable.stringsdict */, FF8C49921BBC9D140010D894 /* App.entitlements */, 8D1107310486CEB800E47090 /* Info.plist */, + 17179E282BD6F86C006B18D5 /* PrivacyInfo.xcprivacy */, ); path = Resources; sourceTree = ""; @@ -4395,6 +4399,7 @@ 175791E42930605500490924 /* train@2x.png in Resources */, 175791E52930605500490924 /* logo_40.png in Resources */, 175791E62930605500490924 /* menu_icn_share.png in Resources */, + 17179E2A2BD6F86D006B18D5 /* PrivacyInfo.xcprivacy in Resources */, 175791E72930605500490924 /* autoscroll_pause.png in Resources */, 175791E82930605500490924 /* safari@3x.png in Resources */, 175791E92930605500490924 /* g_icn_eating.png in Resources */, @@ -4811,6 +4816,7 @@ FF03AFE419F87A770063002A /* g_icn_folder_read.png in Resources */, FFDD845E16E8871A000AA0A2 /* menu_icn_fetch_subscribers.png in Resources */, FF5ACC211DE5ED7500FBD044 /* menu_icn_notifications.png in Resources */, + 17179E292BD6F86C006B18D5 /* PrivacyInfo.xcprivacy in Resources */, FFDD845F16E8871A000AA0A2 /* menu_icn_fetch_subscribers@2x.png in Resources */, FFDD846016E8871A000AA0A2 /* menu_icn_fetch.png in 
 				FFDD846116E8871A000AA0A2 /* menu_icn_fetch@2x.png in Resources */,
diff --git a/clients/ios/Resources/Info.plist b/clients/ios/Resources/Info.plist
index 2b12f9d9f8..355867e285 100644
--- a/clients/ios/Resources/Info.plist
+++ b/clients/ios/Resources/Info.plist
@@ -208,8 +208,9 @@
 	</dict>
 	<key>UIBackgroundModes</key>
 	<array>
-		<string>fetch</string>
 		<string>audio</string>
+		<string>fetch</string>
+		<string>processing</string>
 	</array>
 	<key>UILaunchStoryboardName</key>
 	<string>$(LAUNCH_SCREEN_NAME)</string>
diff --git a/clients/ios/Resources/PrivacyInfo.xcprivacy b/clients/ios/Resources/PrivacyInfo.xcprivacy
new file mode 100644
index 0000000000..729c0df808
--- /dev/null
+++ b/clients/ios/Resources/PrivacyInfo.xcprivacy
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+	<key>NSPrivacyAccessedAPITypes</key>
+	<array>
+		<dict>
+			<key>NSPrivacyAccessedAPIType</key>
+			<string>NSPrivacyAccessedAPICategoryUserDefaults</string>
+			<key>NSPrivacyAccessedAPITypeReasons</key>
+			<array>
+				<string>1C8F.1</string>
+			</array>
+		</dict>
+	</array>
+</dict>
+</plist>
+

From ca4ec6008abc042d3395ab0961266aaf421f3bee Mon Sep 17 00:00:00 2001
From: David Sinclair
Date: Mon, 22 Apr 2024 20:18:51 -0400
Subject: [PATCH 29/69] #1247 (Mac Catalyst edition)

- No longer clearing cached images in response to the spurious memory
  warning that Mac Catalyst posts after about 18 minutes, even with only
  about 100 MB of memory in use.
---
 clients/ios/Classes/NewsBlurAppDelegate.m | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/clients/ios/Classes/NewsBlurAppDelegate.m b/clients/ios/Classes/NewsBlurAppDelegate.m
index 1d6ba4f4eb..45b0a93914 100644
--- a/clients/ios/Classes/NewsBlurAppDelegate.m
+++ b/clients/ios/Classes/NewsBlurAppDelegate.m
@@ -650,8 +650,10 @@ - (void)didReceiveMemoryWarning {
     // Releases the view if it doesn't have a superview.
     [super didReceiveMemoryWarning];
 
+#if !TARGET_OS_MACCATALYST
     // Release any cached data, images, etc that aren't in use.
     [cachedStoryImages removeAllObjects];
+#endif
 }
 
 - (void)setupReachability {

From dd116b3f47d8c606c6bca6415b2ed1439889383b Mon Sep 17 00:00:00 2001
From: David Sinclair
Date: Mon, 22 Apr 2024 20:42:37 -0400
Subject: [PATCH 30/69] #1247 (Mac Catalyst edition)

- Fixed the feed cell colors when switching the system appearance.
--- clients/ios/Classes/BaseViewController.h | 1 + clients/ios/Classes/BaseViewController.m | 30 +++++++++++-------- clients/ios/Classes/FeedsObjCViewController.m | 12 ++++++++ 3 files changed, 30 insertions(+), 13 deletions(-) diff --git a/clients/ios/Classes/BaseViewController.h b/clients/ios/Classes/BaseViewController.h index 13da3b6409..3469bd8218 100644 --- a/clients/ios/Classes/BaseViewController.h +++ b/clients/ios/Classes/BaseViewController.h @@ -27,6 +27,7 @@ - (void)addKeyCommandWithInput:(NSString *)input modifierFlags:(UIKeyModifierFlags)modifierFlags action:(SEL)action discoverabilityTitle:(NSString *)discoverabilityTitle wantPriority:(BOOL)wantPriority; - (void)addCancelKeyCommandWithAction:(SEL)action discoverabilityTitle:(NSString *)discoverabilityTitle; +- (void)systemAppearanceDidChange:(BOOL)isDark; - (void)updateTheme; - (void)tableView:(UITableView *)tableView redisplayCellAtIndexPath:(NSIndexPath *)indexPath; diff --git a/clients/ios/Classes/BaseViewController.m b/clients/ios/Classes/BaseViewController.m index e6c5c7faf4..ef22450b70 100644 --- a/clients/ios/Classes/BaseViewController.m +++ b/clients/ios/Classes/BaseViewController.m @@ -53,7 +53,7 @@ - (void)informError:(id)error details:(NSString *)details statusCode:(NSInteger) return [self informError:@"The server barfed!"]; } else { errorMessage = [error localizedDescription]; - if ([error code] == 4 && + if ([error code] == 4 && [errorMessage rangeOfString:@"cancelled"].location != NSNotFound) { return; } @@ -61,8 +61,8 @@ - (void)informError:(id)error details:(NSString *)details statusCode:(NSInteger) [MBProgressHUD hideHUDForView:self.view animated:YES]; MBProgressHUD *HUD = [MBProgressHUD showHUDAddedTo:self.view animated:YES]; - [HUD setCustomView:[[UIImageView alloc] - initWithImage:[UIImage imageNamed:@"warning.gif"]]]; + [HUD setCustomView:[[UIImageView alloc] + initWithImage:[UIImage imageNamed:@"warning.gif"]]]; [HUD setMode:MBProgressHUDModeCustomView]; if (details) { [HUD setDetailsLabelText:details]; @@ -70,19 +70,19 @@ - (void)informError:(id)error details:(NSString *)details statusCode:(NSInteger) HUD.labelText = errorMessage; [HUD hide:YES afterDelay:(details ? 
3 : 1)]; -// UIAlertView* alertView = [[UIAlertView alloc] -// initWithTitle:@"Error" -// message:localizedDescription delegate:nil -// cancelButtonTitle:@"OK" -// otherButtonTitles:nil]; -// [alertView show]; -// [alertView release]; + // UIAlertView* alertView = [[UIAlertView alloc] + // initWithTitle:@"Error" + // message:localizedDescription delegate:nil + // cancelButtonTitle:@"OK" + // otherButtonTitles:nil]; + // [alertView show]; + // [alertView release]; } - (void)informMessage:(NSString *)message { [MBProgressHUD hideHUDForView:self.view animated:YES]; MBProgressHUD *HUD = [MBProgressHUD showHUDAddedTo:self.view animated:YES]; - HUD.mode = MBProgressHUDModeText; + HUD.mode = MBProgressHUDModeText; HUD.labelText = message; [HUD hide:YES afterDelay:.75]; } @@ -94,6 +94,10 @@ - (void)informLoadingMessage:(NSString *)message { [HUD hide:YES afterDelay:2]; } +- (void)systemAppearanceDidChange:(BOOL)isDark { + [[ThemeManager themeManager] systemAppearanceDidChange:isDark]; +} + - (void)updateTheme { // Subclasses should override this, calling super, to update their nav bar, table, etc } @@ -160,7 +164,7 @@ - (void)addCancelKeyCommandWithAction:(SEL)action discoverabilityTitle:(NSString #pragma mark UIViewController - (void) viewDidLoad { - [super viewDidLoad]; + [super viewDidLoad]; BOOL isDark = [NewsBlurAppDelegate sharedAppDelegate].window.windowScene.traitCollection.userInterfaceStyle == UIUserInterfaceStyleDark; @@ -182,7 +186,7 @@ - (void)traitCollectionDidChange:(UITraitCollection *)previousTraitCollection { BOOL isDark = [NewsBlurAppDelegate sharedAppDelegate].window.windowScene.traitCollection.userInterfaceStyle == UIUserInterfaceStyleDark; - [[ThemeManager themeManager] systemAppearanceDidChange:isDark]; + [self systemAppearanceDidChange:isDark]; } - (UIStatusBarStyle)preferredStatusBarStyle { diff --git a/clients/ios/Classes/FeedsObjCViewController.m b/clients/ios/Classes/FeedsObjCViewController.m index e27b4a9c52..0a2b7812a2 100644 --- a/clients/ios/Classes/FeedsObjCViewController.m +++ b/clients/ios/Classes/FeedsObjCViewController.m @@ -1321,6 +1321,18 @@ - (void)resizeFontSize { [appDelegate.feedDetailViewController reloadWithSizing]; } +- (void)systemAppearanceDidChange:(BOOL)isDark { + [super systemAppearanceDidChange:isDark]; + +#if TARGET_OS_MACCATALYST + if (ThemeManager.themeManager.isLikeSystem) { + self.view.backgroundColor = UIColor.clearColor; + } else { + self.view.backgroundColor = UIColorFromRGB(0xf4f4f4); + } +#endif +} + - (void)updateTheme { [super updateTheme]; From d1dafe7606a9e38c370d8c9bd992fa081fe8e8de Mon Sep 17 00:00:00 2001 From: Samuel Clay Date: Wed, 24 Apr 2024 09:43:56 -0400 Subject: [PATCH 31/69] Black formatting. 
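
The reformatting below can be reproduced with a stock Black run. This is a
minimal sketch, assuming Black is installed in the project's virtualenv;
the exact Black version and any pyproject.toml overrides used for this
commit are not recorded in the patch:

    #!/srv/newsblur/venv/newsblur3/bin/python
    # Hypothetical helper: rerun the formatter that produced this commit.
    # Assumes Black is installed in this venv ("pip install black"); with no
    # pyproject.toml overrides it applies its defaults (88-column lines,
    # double quotes, normalized blank lines).
    import subprocess
    import sys


    def format_tree(root: str = "/srv/newsblur") -> int:
        # "python -m black <path>" rewrites every .py file under <path> in
        # place; adding "--check --diff" would preview instead of writing.
        return subprocess.run([sys.executable, "-m", "black", root]).returncode


    if __name__ == "__main__":
        sys.exit(format_tree(sys.argv[1] if len(sys.argv) > 1 else "/srv/newsblur"))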
--- .../consul/tasks/get_consul_manager_ip.py | 32 +- .../tasks/get_credentials.py | 13 +- ansible/utils/check_droplet.py | 6 +- ansible/utils/generate_inventory.py | 19 +- api/newsblur.py | 348 +- apps/analyzer/classifier.py | 76 +- apps/analyzer/feed_filter.py | 38 +- apps/analyzer/forms.py | 25 +- apps/analyzer/lda.py | 404 +-- apps/analyzer/migrations/0001_initial.py | 45 +- apps/analyzer/models.py | 253 +- apps/analyzer/phrase_filter.py | 73 +- apps/analyzer/tasks.py | 6 +- apps/analyzer/tests.py | 206 +- apps/analyzer/tfidf.py | 5 +- apps/analyzer/tokenizer.py | 20 +- apps/analyzer/urls.py | 8 +- apps/analyzer/views.py | 122 +- apps/api/tests.py | 9 +- apps/api/urls.py | 28 +- apps/api/views.py | 478 +-- apps/categories/models.py | 77 +- apps/categories/urls.py | 4 +- apps/categories/views.py | 35 +- apps/feed_import/migrations/0001_initial.py | 36 +- apps/feed_import/models.py | 141 +- apps/feed_import/tasks.py | 6 +- apps/feed_import/test_feed_import.py | 89 +- apps/feed_import/urls.py | 4 +- apps/feed_import/views.py | 44 +- apps/mobile/tests.py | 9 +- apps/mobile/urls.py | 2 +- apps/mobile/views.py | 3 +- apps/monitor/urls.py | 54 +- apps/monitor/views/newsblur_app_servers.py | 72 +- apps/monitor/views/newsblur_app_times.py | 37 +- apps/monitor/views/newsblur_classifiers.py | 12 +- apps/monitor/views/newsblur_dbtimes.py | 30 +- apps/monitor/views/newsblur_errors.py | 11 +- apps/monitor/views/newsblur_feed_counts.py | 41 +- apps/monitor/views/newsblur_feed_sizes.py | 25 +- apps/monitor/views/newsblur_feeds.py | 24 +- apps/monitor/views/newsblur_loadtimes.py | 13 +- apps/monitor/views/newsblur_stories.py | 11 +- apps/monitor/views/newsblur_tasks_codes.py | 40 +- apps/monitor/views/newsblur_tasks_pipeline.py | 44 +- apps/monitor/views/newsblur_tasks_servers.py | 67 +- apps/monitor/views/newsblur_tasks_times.py | 36 +- apps/monitor/views/newsblur_updates.py | 27 +- apps/monitor/views/newsblur_users.py | 83 +- apps/monitor/views/prometheus_redis.py | 38 +- apps/newsletters/models.py | 165 +- apps/newsletters/urls.py | 4 +- apps/newsletters/views.py | 22 +- apps/notifications/models.py | 175 +- apps/notifications/urls.py | 12 +- apps/notifications/views.py | 82 +- apps/oauth/models.py | 2 +- apps/oauth/urls.py | 66 +- apps/oauth/views.py | 770 ++-- apps/profile/factories.py | 11 +- apps/profile/forms.py | 222 +- apps/profile/management/commands/check_db.py | 4 +- apps/profile/management/commands/fp.py | 10 +- .../commands/reimport_paypal_history.py | 48 +- .../commands/reimport_stripe_history.py | 31 +- .../management/commands/remove_last_user.py | 7 +- apps/profile/middleware.py | 207 +- apps/profile/migrations/0001_initial.py | 546 ++- .../migrations/0002_auto_20200620_0803.py | 14 +- .../migrations/0003_auto_20201005_0932.py | 453 ++- .../migrations/0004_auto_20220110_2106.py | 474 ++- .../migrations/0005_profile_is_archive.py | 7 +- .../migrations/0006_profile_days_of_unread.py | 7 +- .../migrations/0007_auto_20220125_2108.py | 458 ++- .../migrations/0008_profile_paypal_sub_id.py | 7 +- apps/profile/migrations/0009_paypalids.py | 22 +- .../0010_profile_active_provider.py | 7 +- .../migrations/0011_auto_20220408_1908.py | 462 ++- .../migrations/0012_auto_20220511_1710.py | 454 ++- apps/profile/models.py | 1899 ++++++---- apps/profile/tasks.py | 34 +- apps/profile/test_profile.py | 37 +- apps/profile/urls.py | 76 +- apps/profile/views.py | 812 +++-- apps/push/migrations/0001_initial.py | 27 +- apps/push/models.py | 129 +- apps/push/signals.py | 4 +- apps/push/test_push.py | 321 +- 
apps/push/urls.py | 2 +- apps/push/views.py | 60 +- apps/reader/admin.py | 2 +- apps/reader/factories.py | 14 +- apps/reader/forms.py | 171 +- apps/reader/http.py | 3 +- apps/reader/managers.py | 36 +- apps/reader/migrations/0001_initial.py | 89 +- apps/reader/models.py | 1407 ++++---- apps/reader/tasks.py | 25 +- apps/reader/test_reader.py | 216 +- apps/reader/urls.py | 144 +- apps/reader/views.py | 2729 +++++++------- .../migrations/0001_initial.py | 71 +- apps/recommendations/models.py | 52 +- .../templatetags/recommendations_tags.py | 28 +- apps/recommendations/tests.py | 9 +- apps/recommendations/urls.py | 10 +- apps/recommendations/views.py | 115 +- apps/rss_feeds/factories.py | 17 +- apps/rss_feeds/icon_importer.py | 177 +- .../management/commands/calculate_scores.py | 56 +- .../management/commands/count_stories.py | 18 +- .../management/commands/count_subscribers.py | 28 +- .../management/commands/mark_read.py | 22 +- .../management/commands/query_popularity.py | 4 +- .../management/commands/refresh_feed.py | 14 +- .../management/commands/refresh_feeds.py | 95 +- .../management/commands/task_feeds.py | 20 +- .../management/commands/trim_feeds.py | 24 +- apps/rss_feeds/migrations/0001_initial.py | 142 +- .../migrations/0002_remove_mongo_types.py | 15 +- .../migrations/0003_auto_20220110_2105.py | 23 +- .../migrations/0003_mongo_version_4_0.py | 10 +- .../migrations/0004_feed_pro_subscribers.py | 7 +- .../0005_feed_archive_subscribers.py | 7 +- .../migrations/0006_feed_fs_size_bytes.py | 7 +- .../migrations/0007_merge_20220517_1355.py | 8 +- .../migrations/0008_feed_archive_count.py | 7 +- apps/rss_feeds/models.py | 3143 +++++++++-------- apps/rss_feeds/page_importer.py | 268 +- apps/rss_feeds/tasks.py | 211 +- apps/rss_feeds/test_rss_feeds.py | 161 +- apps/rss_feeds/text_importer.py | 204 +- apps/rss_feeds/urls.py | 38 +- apps/rss_feeds/views.py | 441 +-- .../search/management/commands/index_feeds.py | 20 +- .../management/commands/index_stories.py | 26 +- apps/search/models.py | 464 +-- apps/search/tasks.py | 10 +- apps/search/urls.py | 2 +- apps/search/views.py | 19 +- .../management/commands/popular_stories.py | 4 +- .../social/migrations/0001_username_unique.py | 14 +- apps/social/models.py | 2948 +++++++++------- apps/social/tasks.py | 39 +- apps/social/templatetags/social_tags.py | 72 +- apps/social/urls.py | 98 +- apps/social/views.py | 1922 +++++----- apps/static/tests.py | 9 +- apps/static/views.py | 101 +- .../management/commands/collect_feedback.py | 4 +- .../management/commands/collect_stats.py | 3 +- apps/statistics/models.py | 336 +- apps/statistics/rstats.py | 135 +- apps/statistics/tasks.py | 9 +- .../templatetags/statistics_tags.py | 17 +- apps/statistics/tests.py | 9 +- apps/statistics/urls.py | 8 +- apps/statistics/views.py | 91 +- archive/ansible/do_inventory.py | 334 +- archive/fabfile.py | 2031 ++++++----- archive/jammit.py | 94 +- archive/munin/munin/__init__.py | 31 +- archive/munin/munin/cassandra.py | 31 +- archive/munin/munin/ddwrt.py | 9 +- archive/munin/munin/gearman.py | 27 +- archive/munin/munin/memcached.py | 11 +- archive/munin/munin/mongodb.py | 16 +- archive/munin/munin/mysql.py | 24 +- archive/munin/munin/nginx.py | 6 +- archive/munin/munin/pgbouncer.py | 9 +- archive/munin/munin/postgres.py | 23 +- archive/munin/munin/redis.py | 13 +- archive/munin/munin/riak.py | 7 +- config/gunicorn_conf.py | 10 +- config/pystartup.py | 34 +- flask_metrics/flask_metrics_haproxy.py | 55 +- flask_metrics/flask_metrics_mongo.py | 89 +- 
flask_metrics/flask_metrics_redis.py | 150 +- flask_monitor/db_monitor.py | 135 +- manage.py | 1 - newsblur_web/__init__.py | 2 +- newsblur_web/celeryapp.py | 7 +- newsblur_web/docker_local_settings.py | 137 +- newsblur_web/settings.py | 870 +++-- newsblur_web/sitecustomize.py | 7 +- newsblur_web/test_settings.py | 26 +- newsblur_web/urls.py | 140 +- newsblur_web/wsgi.py | 4 +- perf/locust.py | 1 + utils/PyRSS2Gen.py | 206 +- utils/S3.py | 336 +- utils/archive/Image Color Algorithm.py | 27 +- utils/archive/bootstrap_intel.py | 4 +- utils/archive/bootstrap_mongo.py | 155 +- utils/archive/bootstrap_redis_sessions.py | 4 +- utils/archive/bootstrap_story_hash.py | 18 +- utils/archive/check_status.py | 9 +- utils/archive/green.py | 24 +- utils/archive/knight.py | 88 +- utils/archive/memcached_status.py | 44 +- utils/db_functions.py | 17 +- utils/exception_middleware.py | 10 +- utils/facebook_fetcher.py | 205 +- utils/feed_fetcher.py | 508 +-- utils/feed_functions.py | 197 +- utils/feedfinder_forman.py | 34 +- utils/feedfinder_pilgrim.py | 256 +- utils/fields.py | 16 +- utils/grafana_backup.py | 28 +- utils/hostname_ssh.py | 13 +- utils/image_functions.py | 51 +- utils/jennyholzer.py | 4 +- utils/json_fetcher.py | 57 +- utils/json_functions.py | 41 +- utils/log.py | 136 +- utils/management_functions.py | 11 +- utils/mongo_command_monitor.py | 59 +- utils/mongo_raw_log_middleware.py | 109 +- utils/mongoengine_fields.py | 28 +- utils/monitor_disk_usage.py | 26 +- utils/monitor_newsletter_delivery.py | 43 +- utils/monitor_redis_bgsave.py | 30 +- utils/monitor_task_fetches.py | 38 +- utils/monitor_work_queue.py | 29 +- utils/munin/base.py | 15 +- utils/munin/newsblur_app_servers.py | 92 +- utils/munin/newsblur_app_times.py | 56 +- utils/munin/newsblur_classifiers.py | 31 +- utils/munin/newsblur_dbtimes.py | 53 +- utils/munin/newsblur_errors.py | 22 +- utils/munin/newsblur_feed_counts.py | 63 +- utils/munin/newsblur_feeds.py | 39 +- utils/munin/newsblur_loadtimes.py | 25 +- utils/munin/newsblur_stories.py | 22 +- utils/munin/newsblur_tasks_codes.py | 56 +- utils/munin/newsblur_tasks_pipeline.py | 67 +- utils/munin/newsblur_tasks_servers.py | 92 +- utils/munin/newsblur_tasks_times.py | 57 +- utils/munin/newsblur_updates.py | 54 +- utils/munin/newsblur_users.py | 37 +- utils/pipeline_utils.py | 115 +- utils/ratelimit.py | 77 +- utils/redis_raw_log_middleware.py | 120 +- utils/request_introspection_middleware.py | 83 +- utils/rtail.py | 10 +- utils/s3_utils.py | 53 +- utils/scrubber/__init__.py | 313 +- utils/story_functions.py | 321 +- utils/templatetags/utils_tags.py | 208 +- utils/testrunner.py | 11 +- utils/tlnb.py | 78 +- utils/tlnbt.py | 1 - utils/tlnbw.py | 1 - utils/tornado_escape.py | 49 +- utils/twitter_fetcher.py | 563 +-- utils/urlnorm.py | 243 +- utils/user_functions.py | 173 +- utils/view_functions.py | 53 +- utils/zgrep.py | 53 +- vendor/appdotnet.py | 162 +- vendor/cjson/jsontest.py | 283 +- vendor/cjson/setup.py | 26 +- vendor/facebook.py | 75 +- vendor/mailgun.py | 164 +- vendor/oauth2client/anyjson.py | 8 +- vendor/oauth2client/appengine.py | 941 +++-- vendor/oauth2client/client.py | 2020 +++++------ vendor/oauth2client/clientsecrets.py | 186 +- vendor/oauth2client/crypt.py | 343 +- vendor/oauth2client/django_orm.py | 201 +- vendor/oauth2client/file.py | 152 +- vendor/oauth2client/gce.py | 105 +- vendor/oauth2client/keyring_storage.py | 147 +- vendor/oauth2client/locked_file.py | 576 ++- vendor/oauth2client/multistore_file.py | 627 ++-- vendor/oauth2client/tools.py | 242 +- 
vendor/oauth2client/util.py | 174 +- vendor/opml/__init__.py | 18 +- vendor/opml/tests.py | 12 +- vendor/paypalapi/__init__.py | 13 +- vendor/paypalapi/compat.py | 42 +- vendor/paypalapi/countries.py | 480 +-- vendor/paypalapi/exceptions.py | 11 +- vendor/paypalapi/interface.py | 175 +- vendor/paypalapi/response.py | 11 +- vendor/paypalapi/response_list.py | 24 +- vendor/paypalapi/settings.py | 75 +- vendor/python-munin/bin/munin-node.py | 39 +- vendor/python-munin/docs/conf.py | 99 +- vendor/python-munin/setup.py | 26 +- vendor/readability/cleaners.py | 3 +- vendor/readability/compat/__init__.py | 6 +- vendor/readability/encoding.py | 16 +- vendor/readability/htmls.py | 20 +- vendor/readability/readability.py | 90 +- vendor/reverend/guessers/email.py | 59 +- vendor/reverend/test/test_email.py | 4 +- vendor/reverend/thomas.py | 151 +- vendor/reverend/ui/tester.py | 75 +- vendor/reverend/ui/trainer.py | 116 +- vendor/reverend/ui/util.py | 35 +- vendor/rfc822.py | 331 +- vendor/timezones/__init__.py | 1 - vendor/timezones/decorators.py | 7 +- vendor/timezones/fields.py | 49 +- vendor/timezones/forms.py | 6 +- .../templatetags/timezone_filters.py | 5 +- vendor/timezones/timezones_tests/__init__.py | 2 +- vendor/timezones/timezones_tests/models.py | 1 - vendor/timezones/timezones_tests/tests.py | 81 +- vendor/timezones/utilities.py | 2 + vendor/timezones/zones.py | 3 +- vendor/zebra/admin.py | 2 +- vendor/zebra/conf/options.py | 58 +- vendor/zebra/forms.py | 28 +- .../commands/clear_stripe_test_customers.py | 15 +- vendor/zebra/migrations/0001_initial.py | 54 +- vendor/zebra/mixins.py | 58 +- vendor/zebra/models.py | 4 +- vendor/zebra/signals.py | 62 +- vendor/zebra/templatetags/zebra_tags.py | 5 +- vendor/zebra/urls.py | 6 +- vendor/zebra/utils.py | 16 +- vendor/zebra/views.py | 31 +- vendor/zebra/widgets.py | 22 +- 326 files changed, 27153 insertions(+), 20356 deletions(-) diff --git a/ansible/roles/consul/tasks/get_consul_manager_ip.py b/ansible/roles/consul/tasks/get_consul_manager_ip.py index e98eb6b3f0..659d06a478 100755 --- a/ansible/roles/consul/tasks/get_consul_manager_ip.py +++ b/ansible/roles/consul/tasks/get_consul_manager_ip.py @@ -14,24 +14,31 @@ def get_host_ips_from_group(group_name): :param inventory_base_path: Base path to the inventory directories. Defaults to the path in ansible.cfg. :return: A list of IP addresses belonging to the specified group. 
""" - cmd = ['ansible-inventory', '-i', '/srv/newsblur/ansible/inventories/hetzner.ini', '-i', '/srv/newsblur/ansible/inventories/hetzner.yml', '--list'] - + cmd = [ + "ansible-inventory", + "-i", + "/srv/newsblur/ansible/inventories/hetzner.ini", + "-i", + "/srv/newsblur/ansible/inventories/hetzner.yml", + "--list", + ] + try: # Execute the ansible-inventory command result = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, check=True) - + # Parse the JSON output from ansible-inventory inventory_data = json.loads(result.stdout) - + host_ips = [] # Check if the group exists if group_name in inventory_data: # Get the list of hosts in the specified group - if 'hosts' in inventory_data[group_name]: - for host in inventory_data[group_name]['hosts']: + if "hosts" in inventory_data[group_name]: + for host in inventory_data[group_name]["hosts"]: # Fetch the host details, specifically looking for the ansible_host variable for the IP - host_vars = inventory_data['_meta']['hostvars'][host] - ip_address = host_vars.get('ansible_host', None) + host_vars = inventory_data["_meta"]["hostvars"][host] + ip_address = host_vars.get("ansible_host", None) if ip_address: host_ips.append(ip_address) else: @@ -50,16 +57,19 @@ def get_host_ips_from_group(group_name): with open(TOKEN_FILE) as f: token = f.read().strip() - os.environ['DO_API_TOKEN'] = token + os.environ["DO_API_TOKEN"] = token manager = digitalocean.Manager(token=token) my_droplets = manager.get_all_droplets() consul_manager_droplets = [d for d in my_droplets if "db-consul" in d.name] # Use ansible-inventory to get the consul-manager ip -group_name = 'hconsul' +group_name = "hconsul" hetzner_hosts = get_host_ips_from_group(group_name) -consul_manager_ip_address = ','.join([f"\"{droplet.ip_address}\"" for droplet in consul_manager_droplets] + [f"\"{host}\"" for host in hetzner_hosts]) +consul_manager_ip_address = ",".join( + [f'"{droplet.ip_address}"' for droplet in consul_manager_droplets] + + [f'"{host}"' for host in hetzner_hosts] +) print(consul_manager_ip_address) diff --git a/ansible/roles/postgres-exporter/tasks/get_credentials.py b/ansible/roles/postgres-exporter/tasks/get_credentials.py index 862c46d897..85fa38df88 100755 --- a/ansible/roles/postgres-exporter/tasks/get_credentials.py +++ b/ansible/roles/postgres-exporter/tasks/get_credentials.py @@ -1,12 +1,13 @@ #!/srv/newsblur/venv/newsblur3/bin/python import sys -sys.path.append('/srv/newsblur') + +sys.path.append("/srv/newsblur") from newsblur_web import settings -username = settings.DATABASES['default']['USER'] -password = settings.DATABASES['default']['PASSWORD'] +username = settings.DATABASES["default"]["USER"] +password = settings.DATABASES["default"]["PASSWORD"] -if sys.argv[1] =='postgres_credentials': +if sys.argv[1] == "postgres_credentials": print(f"{username}:{password}") -if sys.argv[1] =='s3_bucket': - print(settings.S3_BACKUP_BUCKET) \ No newline at end of file +if sys.argv[1] == "s3_bucket": + print(settings.S3_BACKUP_BUCKET) diff --git a/ansible/utils/check_droplet.py b/ansible/utils/check_droplet.py index b231777aaa..1abcc0d437 100644 --- a/ansible/utils/check_droplet.py +++ b/ansible/utils/check_droplet.py @@ -3,6 +3,7 @@ import digitalocean import subprocess + def test_ssh(drop): droplet_ip_address = drop.ip_address result = subprocess.call(f"ssh -o StrictHostKeyChecking=no root@{droplet_ip_address} ls", shell=True) @@ -10,6 +11,7 @@ def test_ssh(drop): return True return False + TOKEN_FILE = "/srv/secrets-newsblur/keys/digital_ocean.token" 
droplet_name = sys.argv[1] @@ -25,7 +27,7 @@ def test_ssh(drop): while not ssh_works: if timer > timeout: raise Exception(f"The {droplet_name} droplet was not created.") - + droplets = [drop for drop in manager.get_all_droplets() if drop.name == droplet_name] if droplets: droplet = droplets[0] @@ -33,4 +35,4 @@ def test_ssh(drop): ssh_works = test_ssh(droplet) time.sleep(3) timer += 3 -print("Success!") \ No newline at end of file +print("Success!") diff --git a/ansible/utils/generate_inventory.py b/ansible/utils/generate_inventory.py index d9e26f4a30..6fac02e791 100755 --- a/ansible/utils/generate_inventory.py +++ b/ansible/utils/generate_inventory.py @@ -8,7 +8,7 @@ OLD = False # Set env var OLD=1 to use existing servers -if os.environ.get('OLD', False): +if os.environ.get("OLD", False): OLD = True if OLD: @@ -17,7 +17,7 @@ TOKEN_FILE = "/srv/secrets-newsblur/keys/digital_ocean.token" try: - api_token = open(TOKEN_FILE, 'r').read().strip() + api_token = open(TOKEN_FILE, "r").read().strip() except IOError: print(f" ---> Missing Digital Ocean API token: {TOKEN_FILE}") exit() @@ -25,20 +25,20 @@ outfile = f"/srv/newsblur/ansible/inventories/digital_ocean{'.old' if OLD else ''}.ini" # Install from https://github.com/do-community/do-ansible-inventory/releases -ansible_inventory_cmd = f'do-ansible-inventory -t {api_token} --out {outfile}' +ansible_inventory_cmd = f"do-ansible-inventory -t {api_token} --out {outfile}" subprocess.call(ansible_inventory_cmd, shell=True) -with open(outfile, 'r') as original: +with open(outfile, "r") as original: data = original.read() -with open(outfile, 'w') as modified: +with open(outfile, "w") as modified: modified.write("127.0.0.1 ansible_connection=local\n" + data) -exit() # Too many requests if we run the below code +exit() # Too many requests if we run the below code do = digitalocean.Manager(token=api_token) droplets = do.get_all_droplets() -print("\n ---> Checking droplets: %s\n" % (' '.join([d.name for d in droplets]))) +print("\n ---> Checking droplets: %s\n" % (" ".join([d.name for d in droplets]))) def check_droplets_created(): @@ -46,8 +46,8 @@ def check_droplets_created(): droplets = do.get_all_droplets() for instance in droplets: - if instance.status == 'new': - print(".", end=' ') + if instance.status == "new": + print(".", end=" ") sys.stdout.flush() i += 1 time.sleep(i) @@ -56,6 +56,7 @@ def check_droplets_created(): print(" ---> All booted!") return True + i = 0 while True: if check_droplets_created(): diff --git a/api/newsblur.py b/api/newsblur.py index acdd2e437d..f417fe8896 100644 --- a/api/newsblur.py +++ b/api/newsblur.py @@ -13,342 +13,318 @@ # API_URL = "https://nb.local.host:8000/" -class request(): - +class request: opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(http.cookiejar.CookieJar())) - - def __init__(self, endpoint=None, method='get'): + + def __init__(self, endpoint=None, method="get"): self.endpoint = endpoint self.method = method def __call__(self, func): def wrapped(*args, **kwargs): params = func(*args, **kwargs) or {} - url = self.endpoint if self.endpoint else params.pop('url') + url = self.endpoint if self.endpoint else params.pop("url") params = urllib.parse.urlencode(params) - url = "%s%s" % (API_URL, url) - + url = "%s%s" % (API_URL, url) + response = self.opener.open(url, params).read() - + return json.loads(response) + return wrapped -class API: - @request('api/login', method='post') +class API: + @request("api/login", method="post") def login(self, username, password): - ''' + """ Login as 
an existing user. - If a user has no password set, you cannot just send any old password. + If a user has no password set, you cannot just send any old password. Required parameters, username and password, must be of string type. - ''' - return { - 'username': username, - 'password': password - } + """ + return {"username": username, "password": password} - @request('api/logout') + @request("api/logout") def logout(self): - ''' + """ Logout the currently logged in user. - ''' + """ return - @request('api/signup') + @request("api/signup") def signup(self, username, password, email): - ''' + """ Create a new user. All three required parameters must be of type string. - ''' - return { - 'signup_username': username, - 'signup_password': password, - 'signup_email': email - } + """ + return {"signup_username": username, "signup_password": password, "signup_email": email} - @request('rss_feeds/search_feed') + @request("rss_feeds/search_feed") def search_feed(self, address, offset=0): - ''' + """ Retrieve information about a feed from its website or RSS address. Parameter address must be of type string while parameter offset must be an integer. Will return a feed. - ''' - return { - 'address': address, - 'offset': offset - } + """ + return {"address": address, "offset": offset} - @request('reader/feeds') + @request("reader/feeds") def feeds(self, include_favicons=True, flat=False): - ''' + """ Retrieve a list of feeds to which a user is actively subscribed. Includes the 3 unread counts (positive, neutral, negative), as well as optional favicons. - ''' - return { - 'include_favicons': include_favicons, - 'flat': flat - } + """ + return {"include_favicons": include_favicons, "flat": flat} - @request('reader/favicons') + @request("reader/favicons") def favicons(self, feeds=None): - ''' - Retrieve a list of favicons for a list of feeds. - Used when combined with /reader/feeds and include_favicons=false, so the feeds request contains far less data. - Useful for mobile devices, but requires a second request. - ''' + """ + Retrieve a list of favicons for a list of feeds. + Used when combined with /reader/feeds and include_favicons=false, so the feeds request contains far less data. + Useful for mobile devices, but requires a second request. + """ data = [] for feed in feeds: - data.append( ("feeds", feed) ) + data.append(("feeds", feed)) return data @request() def page(self, feed_id): - ''' + """ Retrieve the original page from a single feed. - ''' - return { - 'url': 'reader/page/%s' % feed_id - } + """ + return {"url": "reader/page/%s" % feed_id} @request() def feed(self, feed_id, page=1): - ''' + """ Retrieve the stories from a single feed. - ''' + """ return { - 'url': 'reader/feed/%s' % feed_id, - 'page': page, + "url": "reader/feed/%s" % feed_id, + "page": page, } - @request('reader/refresh_feeds') + @request("reader/refresh_feeds") def refresh_feeds(self): - ''' + """ Up-to-the-second unread counts for each active feed. Poll for these counts no more than once a minute. - ''' + """ return - @request('reader/feeds_trainer') + @request("reader/feeds_trainer") def feeds_trainer(self, feed_id=None): - ''' - Retrieves all popular and known intelligence classifiers. - Also includes user's own classifiers. - ''' + """ + Retrieves all popular and known intelligence classifiers. + Also includes user's own classifiers. + """ return { - 'feed_id': feed_id, + "feed_id": feed_id, } - + @request() def statistics(self, feed_id=None): - ''' + """ If you only want a user's classifiers, use /classifiers/:id. 
Omit the feed_id to get all classifiers for all subscriptions. - ''' - return { - 'url': 'rss_feeds/statistics/%d' % feed_id - } - - @request('rss_feeds/feed_autocomplete') + """ + return {"url": "rss_feeds/statistics/%d" % feed_id} + + @request("rss_feeds/feed_autocomplete") def feed_autocomplete(self, term): - ''' + """ Get a list of feeds that contain a search phrase. Searches by feed address, feed url, and feed title, in that order. Will only show sites with 2+ subscribers. - ''' - return { - 'term': term - } + """ + return {"term": term} - @request('reader/starred_stories') + @request("reader/starred_stories") def starred_stories(self, page=1): - ''' + """ Retrieve a user's starred stories. - ''' + """ return { - 'page': page, + "page": page, } - @request('reader/river_stories') + @request("reader/river_stories") def river_stories(self, feeds, page=1, read_stories_count=0): - ''' + """ Retrieve stories from a collection of feeds. This is known as the River of News. Stories are ordered in reverse chronological order. `read_stories_count` is the number of stories that have been read in this continuation, so NewsBlur can efficiently skip those stories when retrieving new stories. Takes an array of feed ids. - ''' - - data = [ ('page', page), ('read_stories_count', read_stories_count) ] + """ + + data = [("page", page), ("read_stories_count", read_stories_count)] for feed in feeds: - data.append( ("feeds", feed) ) + data.append(("feeds", feed)) return data - - @request('reader/mark_story_hashes_as_read') + + @request("reader/mark_story_hashes_as_read") def mark_story_hashes_as_read(self, story_hashes): - ''' - Mark stories as read using their unique story_hash. - ''' + """ + Mark stories as read using their unique story_hash. + """ data = [] for hash in story_hashes: - data.append( ("story_hash", hash) ) + data.append(("story_hash", hash)) return data - @request('reader/mark_story_as_read') + @request("reader/mark_story_as_read") def mark_story_as_read(self, feed_id, story_ids): - ''' - Mark stories as read. - Multiple story ids can be sent at once. - Each story must be from the same feed. - Takes an array of story ids. - ''' - - data = [ ('feed_id', feed_id) ] + """ + Mark stories as read. + Multiple story ids can be sent at once. + Each story must be from the same feed. + Takes an array of story ids. + """ + + data = [("feed_id", feed_id)] for story_id in story_ids: - data.append( ("story_id", story_id) ) + data.append(("story_id", story_id)) return data - @request('reader/mark_story_as_starred') + @request("reader/mark_story_as_starred") def mark_story_as_starred(self, feed_id, story_id): - ''' + """ Mark a story as starred (saved). - ''' + """ return { - 'feed_id': feed_id, - 'story_id': story_id, + "feed_id": feed_id, + "story_id": story_id, } - @request('reader/mark_all_as_read') + @request("reader/mark_all_as_read") def mark_all_as_read(self, days=0): - ''' + """ Mark all stories in a feed or list of feeds as read. - ''' + """ return { - 'days': days, + "days": days, } - @request('reader/add_url') - def add_url(self, url, folder=''): - ''' - Add a feed by its URL. + @request("reader/add_url") + def add_url(self, url, folder=""): + """ + Add a feed by its URL. Can be either the RSS feed or the website itself. 
- ''' + """ return { - 'url': url, - 'folder': folder, + "url": url, + "folder": folder, } - @request('reader/add_folder') - def add_folder(self, folder, parent_folder=''): - ''' + @request("reader/add_folder") + def add_folder(self, folder, parent_folder=""): + """ Add a new folder. - ''' + """ return { - 'folder': folder, - 'parent_folder': parent_folder, + "folder": folder, + "parent_folder": parent_folder, } - - @request('reader/rename_feed') + + @request("reader/rename_feed") def rename_feed(self, feed_id, feed_title): - ''' + """ Rename a feed title. Only the current user will see the new title. - ''' + """ return { - 'feed_id': feed_id, - 'feed_title': feed_title, + "feed_id": feed_id, + "feed_title": feed_title, } - - @request('reader/delete_feed') + + @request("reader/delete_feed") def delete_feed(self, feed_id, in_folder): - ''' + """ Unsubscribe from a feed. Removes it from the folder. - Set the in_folder parameter to remove a feed from the correct + Set the in_folder parameter to remove a feed from the correct folder, in case the user is subscribed to the feed in multiple folders. - ''' + """ return { - 'feed_id': feed_id, - 'in_folder': in_folder, + "feed_id": feed_id, + "in_folder": in_folder, } - - @request('reader/rename_folder') + + @request("reader/rename_folder") def rename_folder(self, folder_to_rename, new_folder_name, in_folder): - ''' + """ Rename a folder. - ''' + """ return { - 'folder_to_rename': folder_to_rename, - 'new_folder_name': new_folder_name, - 'in_folder': in_folder, + "folder_to_rename": folder_to_rename, + "new_folder_name": new_folder_name, + "in_folder": in_folder, } - - @request('reader/delete_folder') + + @request("reader/delete_folder") def delete_folder(self, folder_to_delete, in_folder): - ''' + """ Delete a folder and unsubscribe from all feeds inside. - ''' + """ return { - 'folder_to_delete': folder_to_delete, - 'in_folder': in_folder, + "folder_to_delete": folder_to_delete, + "in_folder": in_folder, } - - @request('reader/mark_feed_as_read') + + @request("reader/mark_feed_as_read") def mark_feed_as_read(self, feed_ids): - ''' + """ Mark a list of feeds as read. Takes an array of feeds. - ''' + """ data = [] for feed in feed_ids: - data.append( ("feed_id", feed) ) + data.append(("feed_id", feed)) return data - @request('reader/save_feed_order') + @request("reader/save_feed_order") def save_feed_order(self, folders): - ''' + """ Reorder feeds and move them around between folders. The entire folder structure needs to be serialized. - ''' + """ return { - 'folders': folders, + "folders": folders, } @request() def classifier(self, feed_id): - ''' - Get the intelligence classifiers for a user's site. - Only includes the user's own classifiers. - Use /reader/feeds_trainer for popular classifiers. - ''' + """ + Get the intelligence classifiers for a user's site. + Only includes the user's own classifiers. + Use /reader/feeds_trainer for popular classifiers. + """ return { - 'url': '/classifier/%d' % feed_id, + "url": "/classifier/%d" % feed_id, } - @request('classifier/save') + @request("classifier/save") def classifier_save(self, like_type, dislike_type, remove_like_type, remove_dislike_type): - ''' + """ Save intelligence classifiers (tags, titles, authors, and the feed) for a feed. - + TODO: Make this usable. - ''' + """ raise NotImplemented - - @request('import/opml_export') + @request("import/opml_export") def opml_export(self): - ''' + """ Download a backup of feeds and folders as an OPML file. 
Contains folders and feeds in XML; useful for importing in another RSS reader. - ''' + """ return - - @request('import/opml_upload') + + @request("import/opml_upload") def opml_upload(self, opml_file): - ''' + """ Upload an OPML file. - ''' + """ f = open(opml_file) - return { - 'file': f - } - - + return {"file": f} diff --git a/apps/analyzer/classifier.py b/apps/analyzer/classifier.py index 686d3720b0..3604720290 100644 --- a/apps/analyzer/classifier.py +++ b/apps/analyzer/classifier.py @@ -2,8 +2,8 @@ from django.db.models.aggregates import Sum import math + class Classifier: - def __init__(self, user, feed, phrases): self.user = user self.feed = feed @@ -11,7 +11,7 @@ def __init__(self, user, feed, phrases): def get_features(self, doc): found = {} - + for phrase in self.phrases: if phrase in doc: if phrase in found: @@ -20,36 +20,40 @@ def get_features(self, doc): found[phrase] = 1 return found - + def increment_feature(self, feature, category): - count = self.feature_count(feature,category) - if count==0: + count = self.feature_count(feature, category) + if count == 0: fc = FeatureCategory(user=self.user, feed=self.feed, feature=feature, category=category, count=1) fc.save() else: - fc = FeatureCategory.objects.get(user=self.user, feed=self.feed, feature=feature, category=category) + fc = FeatureCategory.objects.get( + user=self.user, feed=self.feed, feature=feature, category=category + ) fc.count = count + 1 fc.save() - + def feature_count(self, feature, category): if isinstance(category, Category): category = category.category - + try: - feature_count = FeatureCategory.objects.get(user=self.user, feed=self.feed, feature=feature, category=category) + feature_count = FeatureCategory.objects.get( + user=self.user, feed=self.feed, feature=feature, category=category + ) except FeatureCategory.DoesNotExist: return 0 else: return float(feature_count.count) - def increment_category(self,category): + def increment_category(self, category): count = self.category_count(category) - if count==0: + if count == 0: category = Category(user=self.user, feed=self.feed, category=category, count=1) category.save() else: category = Category.objects.get(user=self.user, feed=self.feed, category=category) - category.count = count+1 + category.count = count + 1 category.save() def category_count(self, category): @@ -68,12 +72,12 @@ def categories(self): return categories def totalcount(self): - categories = Category.objects.filter(user=self.user, feed=self.feed).aggregate(sum=Sum('count')) - return categories['sum'] + categories = Category.objects.filter(user=self.user, feed=self.feed).aggregate(sum=Sum("count")) + return categories["sum"] def train(self, item, category): features = self.get_features(item) - + # Increment the count for every feature with this category for feature in features: self.increment_feature(feature, category) @@ -84,7 +88,7 @@ def train(self, item, category): def feature_probability(self, feature, category): if self.category_count(category) == 0: return 0 - # The total number of times this feature appeared in this + # The total number of times this feature appeared in this # category divided by the total number of items in this category return self.feature_count(feature, category) / self.category_count(category) @@ -96,21 +100,20 @@ def weighted_probability(self, feature, category, prf, weight=1.0, ap=0.5): totals = sum([self.feature_count(feature, c) for c in self.categories()]) # Calculate the weighted average - bp = ((weight*ap) + (totals*basic_prob)) / (weight+totals) + bp = 
((weight * ap) + (totals * basic_prob)) / (weight + totals)
         print(feature, category, basic_prob, totals, bp)
         return bp
 
 
 class FisherClassifier(Classifier):
-
     def __init__(self, user, feed, phrases):
         Classifier.__init__(self, user, feed, phrases)
         self.minimums = {}
-
+
     def category_probability(self, feature, category):
-        # The frequency of this feature in this category
+        # The frequency of this feature in this category
         clf = self.feature_probability(feature, category)
-        if clf==0:
+        if clf == 0:
             return 0
 
         # The frequency of this feature in all the categories
         freqsum = sum([self.feature_probability(feature, c) for c in self.categories()])
 
         # The probability is the frequency in this category divided by
         # the overall frequency
         p = clf / freqsum
-
+
         return p
-
+
     def fisher_probability(self, item, category):
         # Multiply all the probabilities together
-        p = .5
+        p = 0.5
         features = self.get_features(item)
         if features:
             p = 1
-
+
         for feature in features:
-            p *= (self.weighted_probability(feature, category, self.category_probability))
+            p *= self.weighted_probability(feature, category, self.category_probability)
 
         # Take the natural log and multiply by -2
-        fscore = -2*math.log(p)
+        fscore = -2 * math.log(p)
 
         # Use the inverse chi2 function to get a probability
-        return self.invchi2(fscore,len(features)*2)
-
+        return self.invchi2(fscore, len(features) * 2)
+
     def invchi2(self, chi, df):
         m = chi / 2.0
         sum = term = math.exp(-m)
-        for i in range(1, df//2):
+        for i in range(1, df // 2):
             term *= m / i
             sum += term
         return min(sum, 1.0)
 
-
     def setminimum(self, category, min):
         self.minimums[category] = min
-
+
     def getminimum(self, category):
         if category not in self.minimums:
             return 0
         return self.minimums[category]
-
-    def classify(self,item,default=None):
+
+    def classify(self, item, default=None):
         # Loop through looking for the best result
         best = default
         max = 0.0
         print(self.categories(), item)
         for category in self.categories():
-            p=self.fisher_probability(item, category)
+            p = self.fisher_probability(item, category)
             # Make sure it exceeds its minimum
             if p > self.getminimum(category) and p > max:
                 best = category
                 max = p
-
-        return best
\ No newline at end of file
+
+        return best
diff --git a/apps/analyzer/feed_filter.py b/apps/analyzer/feed_filter.py
index b900ec989e..7b3a6bbcf7 100644
--- a/apps/analyzer/feed_filter.py
+++ b/apps/analyzer/feed_filter.py
@@ -6,36 +6,38 @@
 import re
 import math
 
+
 def entry_features(self, entry):
-    splitter=re.compile('\\W*')
-    f={}
+    splitter = re.compile("\\W*")
+    f = {}
 
     # Extract the title words and annotate
-    titlewords=[s.lower() for s in splitter.split(entry['title'])
-                if len(s)>2 and len(s)<20]
-
-    for w in titlewords: f['Title:'+w]=1
+    titlewords = [s.lower() for s in splitter.split(entry["title"]) if len(s) > 2 and len(s) < 20]
+
+    for w in titlewords:
+        f["Title:" + w] = 1
 
     # Extract the summary words
-    summarywords=[s.lower() for s in splitter.split(entry['summary'])
-                  if len(s)>2 and len(s)<20]
+    summarywords = [s.lower() for s in splitter.split(entry["summary"]) if len(s) > 2 and len(s) < 20]
 
     # Count uppercase words
-    uc=0
+    uc = 0
     for i in range(len(summarywords)):
-        w=summarywords[i]
-        f[w]=1
-        if w.isupper(): uc+=1
+        w = summarywords[i]
+        f[w] = 1
+        if w.isupper():
+            uc += 1
 
         # Get word pairs in summary as features
-        if i<len(summarywords)-1:
-            twowords=' '.join(summarywords[i:i+1])
-            f[twowords]=1
+        if i < len(summarywords) - 1:
+            twowords = " ".join(summarywords[i : i + 1])
+            f[twowords] = 1
 
-    # UPPERCASE is a virtual word flagging too much shouting
-    if float(uc)/len(summarywords)>0.3: f['UPPERCASE']=1
+    # UPPERCASE is a virtual word flagging too much shouting
+    if float(uc) / len(summarywords) > 0.3:
+        f["UPPERCASE"] = 1
 
     return f
diff --git a/apps/analyzer/forms.py b/apps/analyzer/forms.py
index 5377f3b0cb..6dc21e22ee 100644
--- 
a/apps/analyzer/forms.py +++ b/apps/analyzer/forms.py @@ -8,25 +8,22 @@ from apps.profile.models import change_password, blank_authenticate, MGiftCode from apps.social.models import MSocialProfile + class PopularityQueryForm(forms.Form): - email = forms.CharField(widget=forms.TextInput(), - label="Your email address", - required=False) - query = forms.CharField(widget=forms.TextInput(), - label="Keywords", - required=False) + email = forms.CharField(widget=forms.TextInput(), label="Your email address", required=False) + query = forms.CharField(widget=forms.TextInput(), label="Keywords", required=False) def __init__(self, *args, **kwargs): super(PopularityQueryForm, self).__init__(*args, **kwargs) - + def clean_email(self): - if not self.cleaned_data['email']: - raise forms.ValidationError('Please enter in an email address.') + if not self.cleaned_data["email"]: + raise forms.ValidationError("Please enter in an email address.") + + return self.cleaned_data["email"] - return self.cleaned_data['email'] - def clean_query(self): - if not self.cleaned_data['query']: - raise forms.ValidationError('Please enter in a keyword search query.') + if not self.cleaned_data["query"]: + raise forms.ValidationError("Please enter in a keyword search query.") - return self.cleaned_data['query'] + return self.cleaned_data["query"] diff --git a/apps/analyzer/lda.py b/apps/analyzer/lda.py index b354d17145..2d9d94f4b0 100644 --- a/apps/analyzer/lda.py +++ b/apps/analyzer/lda.py @@ -9,226 +9,234 @@ def lgammln(xx): - """ - Returns the gamma function of xx. - Gamma(z) = Integral(0,infinity) of t^(z-1)exp(-t) dt. - (Adapted from: Numerical Recipies in C.) - - Usage: lgammln(xx) - - Copied from stats.py by strang@nmr.mgh.harvard.edu - """ - - coeff = [76.18009173, -86.50532033, 24.01409822, -1.231739516, - 0.120858003e-2, -0.536382e-5] - x = xx - 1.0 - tmp = x + 5.5 - tmp = tmp - (x+0.5)*log(tmp) - ser = 1.0 - for j in range(len(coeff)): - x = x + 1 - ser = ser + coeff[j]/x - return -tmp + log(2.50662827465*ser) + """ + Returns the gamma function of xx. + Gamma(z) = Integral(0,infinity) of t^(z-1)exp(-t) dt. + (Adapted from: Numerical Recipies in C.) 
-def log_sum(log_a, log_b): - if log_a < log_b: - return log_b + log(1 + exp(log_a - log_b)) - else: - return log_a + log(1 + exp(log_b - log_a)) + Usage: lgammln(xx) -def log_normalize(dist): - normalizer = reduce(log_sum, dist) - for ii in xrange(len(dist)): - dist[ii] -= normalizer - return dist + Copied from stats.py by strang@nmr.mgh.harvard.edu + """ -def log_sample(dist): - """ - Sample a key from a dictionary using the values as probabilities (unnormalized) - """ - cutoff = random() - dist = log_normalize(dist) - #print "Normalizer: ", normalizer - - current = 0 - for ii in xrange(len(dist)): - current += exp(dist[ii]) - if current >= cutoff: - #print "Chose", i - return ii - assert False, "Didn't choose anything: %f %f" % (cutoff, current) + coeff = [76.18009173, -86.50532033, 24.01409822, -1.231739516, 0.120858003e-2, -0.536382e-5] + x = xx - 1.0 + tmp = x + 5.5 + tmp = tmp - (x + 0.5) * log(tmp) + ser = 1.0 + for j in range(len(coeff)): + x = x + 1 + ser = ser + coeff[j] / x + return -tmp + log(2.50662827465 * ser) -def create_data(stories, lang="english", doc_limit=-1, delimiter=""): - from nltk.tokenize.treebank import TreebankWordTokenizer - tokenizer = TreebankWordTokenizer() - - from nltk.corpus import stopwords - stop = stopwords.words('english') - - from string import ascii_lowercase - - docs = {} - print("Found %i stories" % stories.count()) - for story in stories: - text = zlib.decompress(story.story_content_z) - # text = story.story_title - text = ''.join(BeautifulSoup(text, features="lxml").findAll(text=True)).lower() - if delimiter: - sections = text.split(delimiter) + +def log_sum(log_a, log_b): + if log_a < log_b: + return log_b + log(1 + exp(log_a - log_b)) else: - sections = [text] - - if doc_limit > 0 and len(docs) > doc_limit: - print("Passed doc limit %i" % len(docs)) - break - print(story.story_title, len(sections)) - - for jj in xrange(len(sections)): - docs["%s-%i" % (story.story_title, jj)] = [x for x in tokenizer.tokenize(sections[jj]) \ - if (not x in stop) and \ - (min(y in ascii_lowercase for y in x))] - return docs + return log_a + log(1 + exp(log_b - log_a)) -class LdaSampler: - def __init__(self, num_topics, doc_smoothing = 0.1, topic_smoothing = 0.01): - self._docs = defaultdict(FreqDist) - self._topics = defaultdict(FreqDist) - self._K = num_topics - self._state = None - - self._alpha = doc_smoothing - self._lambda = topic_smoothing - - def optimize_hyperparameters(self, samples=5, step = 3.0): - rawParam = [log(self._alpha), log(self._lambda)] - - for ii in xrange(samples): - lp_old = self.lhood(self._alpha, self._lambda) - lp_new = log(random()) + lp_old - print("OLD: %f\tNEW: %f at (%f, %f)" % (lp_old, lp_new, self._alpha, self._lambda)) - - l = [x - random() * step for x in rawParam] - r = [x + step for x in rawParam] - - for jj in xrange(100): - rawParamNew = [l[x] + random() * (r[x] - l[x]) for x in xrange(len(rawParam))] - trial_alpha, trial_lambda = [exp(x) for x in rawParamNew] - lp_test = self.lhood(trial_alpha, trial_lambda) - #print("TRYING: %f (need %f) at (%f, %f)" % (lp_test - lp_old, lp_new - lp_old, trial_alpha, trial_lambda)) - - if lp_test > lp_new: - print(jj) - self._alpha = exp(rawParamNew[0]) - self._lambda = exp(rawParamNew[1]) - self._alpha_sum = self._alpha * self._K - self._lambda_sum = self._lambda * self._W - rawParam = [log(self._alpha), log(self._lambda)] - break - else: - for dd in xrange(len(rawParamNew)): - if rawParamNew[dd] < rawParam[dd]: - l[dd] = rawParamNew[dd] - else: - r[dd] = rawParamNew[dd] - assert 
l[dd] <= rawParam[dd] - assert r[dd] >= rawParam[dd] - - print("\nNew hyperparameters (%i): %f %f" % (jj, self._alpha, self._lambda)) - - def lhood(self, doc_smoothing, voc_smoothing): - doc_sum = doc_smoothing * self._K - voc_sum = voc_smoothing * self._W - - val = 0.0 - val += lgammln(doc_sum) * len(self._docs) - val -= lgammln(doc_smoothing) * self._K * len(self._docs) - for ii in self._docs: - for jj in xrange(self._K): - val += lgammln(doc_smoothing + self._docs[ii][jj]) - val -= lgammln(doc_sum + self._docs[ii].N()) - - val += lgammln(voc_sum) * self._K - val -= lgammln(voc_smoothing) * self._W * self._K - for ii in self._topics: - for jj in self._vocab: - val += lgammln(voc_smoothing + self._topics[ii][jj]) - val -= lgammln(voc_sum + self._topics[ii].N()) - return val - - def initialize(self, data): - """ - Data should be keyed by doc-id, values should be iterable - """ - self._alpha_sum = self._alpha * self._K - self._state = defaultdict(dict) +def log_normalize(dist): + normalizer = reduce(log_sum, dist) + for ii in xrange(len(dist)): + dist[ii] -= normalizer + return dist - self._vocab = set([]) - for dd in data: - for ww in xrange(len(data[dd])): - # Learn all the words we'll see - self._vocab.add(data[dd][ww]) - # Initialize the state to unassigned - self._state[dd][ww] = -1 +def log_sample(dist): + """ + Sample a key from a dictionary using the values as probabilities (unnormalized) + """ + cutoff = random() + dist = log_normalize(dist) + # print "Normalizer: ", normalizer - self._W = len(self._vocab) - self._lambda_sum = float(self._W) * self._lambda + current = 0 + for ii in xrange(len(dist)): + current += exp(dist[ii]) + if current >= cutoff: + # print "Chose", i + return ii + assert False, "Didn't choose anything: %f %f" % (cutoff, current) - self._data = data - print("Initialized vocab of size %i" % len(self._vocab)) +def create_data(stories, lang="english", doc_limit=-1, delimiter=""): + from nltk.tokenize.treebank import TreebankWordTokenizer - def prob(self, doc, word, topic): - val = log(self._docs[doc][topic] + self._alpha) - # This is constant across a document, so we don't need to compute this term - # val -= log(self._docs[doc].N() + self._alpha_sum) - - val += log(self._topics[topic][word] + self._lambda) - val -= log(self._topics[topic].N() + self._lambda_sum) + tokenizer = TreebankWordTokenizer() - # print doc, word, topic, self._docs[doc][topic], self._topics[topic][word] - - return val + from nltk.corpus import stopwords - def sample_word(self, doc, position): - word = self._data[doc][position] + stop = stopwords.words("english") - old_topic = self._state[doc][position] - if old_topic != -1: - self.change_count(doc, word, old_topic, -1) + from string import ascii_lowercase - probs = [self.prob(doc, self._data[doc][position], x) for x in xrange(self._K)] - new_topic = log_sample(probs) - #print doc, word, new_topic + docs = {} + print("Found %i stories" % stories.count()) + for story in stories: + text = zlib.decompress(story.story_content_z) + # text = story.story_title + text = "".join(BeautifulSoup(text, features="lxml").findAll(text=True)).lower() + if delimiter: + sections = text.split(delimiter) + else: + sections = [text] - self.change_count(doc, word, new_topic, 1) - self._state[doc][position] = new_topic + if doc_limit > 0 and len(docs) > doc_limit: + print("Passed doc limit %i" % len(docs)) + break + print(story.story_title, len(sections)) - def change_count(self, doc, word, topic, delta): - self._docs[doc].inc(topic, delta) - 
self._topics[topic].inc(word, delta) + for jj in xrange(len(sections)): + docs["%s-%i" % (story.story_title, jj)] = [ + x + for x in tokenizer.tokenize(sections[jj]) + if (not x in stop) and (min(y in ascii_lowercase for y in x)) + ] + return docs - def sample(self, iterations = 100, hyper_delay = 10): - assert self._state - for ii in xrange(iterations): - for dd in self._data: - for ww in xrange(len(self._data[dd])): - self.sample_word(dd, ww) - print("Iteration %i %f" % (ii, self.lhood(self._alpha, self._lambda))) - if hyper_delay >= 0 and ii % hyper_delay == 0: - self.optimize_hyperparameters() - def print_topics(self, num_words=15): - for ii in self._topics: - print("%i:%s\n" % (ii, "\t".join(self._topics[ii].keys()[:num_words]))) +class LdaSampler: + def __init__(self, num_topics, doc_smoothing=0.1, topic_smoothing=0.01): + self._docs = defaultdict(FreqDist) + self._topics = defaultdict(FreqDist) + self._K = num_topics + self._state = None + + self._alpha = doc_smoothing + self._lambda = topic_smoothing + + def optimize_hyperparameters(self, samples=5, step=3.0): + rawParam = [log(self._alpha), log(self._lambda)] + + for ii in xrange(samples): + lp_old = self.lhood(self._alpha, self._lambda) + lp_new = log(random()) + lp_old + print("OLD: %f\tNEW: %f at (%f, %f)" % (lp_old, lp_new, self._alpha, self._lambda)) + + l = [x - random() * step for x in rawParam] + r = [x + step for x in rawParam] + + for jj in xrange(100): + rawParamNew = [l[x] + random() * (r[x] - l[x]) for x in xrange(len(rawParam))] + trial_alpha, trial_lambda = [exp(x) for x in rawParamNew] + lp_test = self.lhood(trial_alpha, trial_lambda) + # print("TRYING: %f (need %f) at (%f, %f)" % (lp_test - lp_old, lp_new - lp_old, trial_alpha, trial_lambda)) + + if lp_test > lp_new: + print(jj) + self._alpha = exp(rawParamNew[0]) + self._lambda = exp(rawParamNew[1]) + self._alpha_sum = self._alpha * self._K + self._lambda_sum = self._lambda * self._W + rawParam = [log(self._alpha), log(self._lambda)] + break + else: + for dd in xrange(len(rawParamNew)): + if rawParamNew[dd] < rawParam[dd]: + l[dd] = rawParamNew[dd] + else: + r[dd] = rawParamNew[dd] + assert l[dd] <= rawParam[dd] + assert r[dd] >= rawParam[dd] + + print("\nNew hyperparameters (%i): %f %f" % (jj, self._alpha, self._lambda)) + + def lhood(self, doc_smoothing, voc_smoothing): + doc_sum = doc_smoothing * self._K + voc_sum = voc_smoothing * self._W + + val = 0.0 + val += lgammln(doc_sum) * len(self._docs) + val -= lgammln(doc_smoothing) * self._K * len(self._docs) + for ii in self._docs: + for jj in xrange(self._K): + val += lgammln(doc_smoothing + self._docs[ii][jj]) + val -= lgammln(doc_sum + self._docs[ii].N()) + + val += lgammln(voc_sum) * self._K + val -= lgammln(voc_smoothing) * self._W * self._K + for ii in self._topics: + for jj in self._vocab: + val += lgammln(voc_smoothing + self._topics[ii][jj]) + val -= lgammln(voc_sum + self._topics[ii].N()) + return val + + def initialize(self, data): + """ + Data should be keyed by doc-id, values should be iterable + """ + + self._alpha_sum = self._alpha * self._K + self._state = defaultdict(dict) + + self._vocab = set([]) + for dd in data: + for ww in xrange(len(data[dd])): + # Learn all the words we'll see + self._vocab.add(data[dd][ww]) + + # Initialize the state to unassigned + self._state[dd][ww] = -1 + + self._W = len(self._vocab) + self._lambda_sum = float(self._W) * self._lambda + + self._data = data + + print("Initialized vocab of size %i" % len(self._vocab)) + + def prob(self, doc, word, topic): + val = 
log(self._docs[doc][topic] + self._alpha) + # This is constant across a document, so we don't need to compute this term + # val -= log(self._docs[doc].N() + self._alpha_sum) + + val += log(self._topics[topic][word] + self._lambda) + val -= log(self._topics[topic].N() + self._lambda_sum) + + # print doc, word, topic, self._docs[doc][topic], self._topics[topic][word] + + return val + + def sample_word(self, doc, position): + word = self._data[doc][position] + + old_topic = self._state[doc][position] + if old_topic != -1: + self.change_count(doc, word, old_topic, -1) + + probs = [self.prob(doc, self._data[doc][position], x) for x in xrange(self._K)] + new_topic = log_sample(probs) + # print doc, word, new_topic + + self.change_count(doc, word, new_topic, 1) + self._state[doc][position] = new_topic + + def change_count(self, doc, word, topic, delta): + self._docs[doc].inc(topic, delta) + self._topics[topic].inc(word, delta) + + def sample(self, iterations=100, hyper_delay=10): + assert self._state + for ii in xrange(iterations): + for dd in self._data: + for ww in xrange(len(self._data[dd])): + self.sample_word(dd, ww) + print("Iteration %i %f" % (ii, self.lhood(self._alpha, self._lambda))) + if hyper_delay >= 0 and ii % hyper_delay == 0: + self.optimize_hyperparameters() + + def print_topics(self, num_words=15): + for ii in self._topics: + print("%i:%s\n" % (ii, "\t".join(self._topics[ii].keys()[:num_words]))) if __name__ == "__main__": - stories = MStory.objects(story_feed_id=199) - d = create_data(stories, doc_limit=250, delimiter="") - lda = LdaSampler(5) - lda.initialize(d) + stories = MStory.objects(story_feed_id=199) + d = create_data(stories, doc_limit=250, delimiter="") + lda = LdaSampler(5) + lda.initialize(d) - lda.sample(50) - lda.print_topics() \ No newline at end of file + lda.sample(50) + lda.print_topics() diff --git a/apps/analyzer/migrations/0001_initial.py b/apps/analyzer/migrations/0001_initial.py index b83bf543f9..a8b5898836 100644 --- a/apps/analyzer/migrations/0001_initial.py +++ b/apps/analyzer/migrations/0001_initial.py @@ -6,34 +6,49 @@ class Migration(migrations.Migration): - initial = True dependencies = [ - ('rss_feeds', '0001_initial'), + ("rss_feeds", "0001_initial"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( - name='Category', + name="Category", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('category', models.CharField(max_length=255)), - ('count', models.IntegerField(default=0)), - ('feed', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rss_feeds.Feed')), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("category", models.CharField(max_length=255)), + ("count", models.IntegerField(default=0)), + ("feed", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="rss_feeds.Feed")), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), + ), ], ), migrations.CreateModel( - name='FeatureCategory', + name="FeatureCategory", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('feature', models.CharField(max_length=255)), - ('category', models.CharField(max_length=255)), - ('count', models.IntegerField(default=0)), - ('feed', 
models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='rss_feeds.Feed')), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("feature", models.CharField(max_length=255)), + ("category", models.CharField(max_length=255)), + ("count", models.IntegerField(default=0)), + ("feed", models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="rss_feeds.Feed")), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), + ), ], ), ] diff --git a/apps/analyzer/models.py b/apps/analyzer/models.py index dc3d01f551..b9eb99b6b6 100644 --- a/apps/analyzer/models.py +++ b/apps/analyzer/models.py @@ -10,24 +10,26 @@ from apps.analyzer.tasks import EmailPopularityQuery from utils import log as logging + class FeatureCategory(models.Model): user = models.ForeignKey(User, on_delete=models.CASCADE) feed = models.ForeignKey(Feed, on_delete=models.CASCADE) feature = models.CharField(max_length=255) category = models.CharField(max_length=255) count = models.IntegerField(default=0) - + def __str__(self): - return '%s - %s (%s)' % (self.feature, self.category, self.count) + return "%s - %s (%s)" % (self.feature, self.category, self.count) + class Category(models.Model): user = models.ForeignKey(User, on_delete=models.CASCADE) feed = models.ForeignKey(Feed, on_delete=models.CASCADE) category = models.CharField(max_length=255) count = models.IntegerField(default=0) - + def __str__(self): - return '%s (%s)' % (self.category, self.count) + return "%s (%s)" % (self.category, self.count) class MPopularityQuery(mongo.Document): @@ -35,55 +37,53 @@ class MPopularityQuery(mongo.Document): query = mongo.StringField() is_emailed = mongo.BooleanField() creation_date = mongo.DateTimeField(default=datetime.datetime.now) - + meta = { - 'collection': 'popularity_query', - 'allow_inheritance': False, + "collection": "popularity_query", + "allow_inheritance": False, } - + def __str__(self): - return "%s - \"%s\"" % (self.email, self.query) + return '%s - "%s"' % (self.email, self.query) def queue_email(self): EmailPopularityQuery.delay(pk=str(self.pk)) - + @classmethod def ensure_all_sent(cls, queue=True): - for query in cls.objects.all().order_by('creation_date'): + for query in cls.objects.all().order_by("creation_date"): query.ensure_sent(queue=queue) - + def ensure_sent(self, queue=True): if self.is_emailed: logging.debug(" ---> Already sent %s" % self) return - + if queue: self.queue_email() else: self.send_email() - + def send_email(self, limit=5000): filename = Feed.xls_query_popularity(self.query, limit=limit) xlsx = open(filename, "r") - - params = { - 'query': self.query - } - text = render_to_string('mail/email_popularity_query.txt', params) - html = render_to_string('mail/email_popularity_query.xhtml', params) - subject = "Keyword popularity spreadsheet: \"%s\"" % self.query - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['<%s>' % (self.email)]) + + params = {"query": self.query} + text = render_to_string("mail/email_popularity_query.txt", params) + html = render_to_string("mail/email_popularity_query.xhtml", params) + subject = 'Keyword popularity spreadsheet: "%s"' % self.query + msg = EmailMultiAlternatives( + subject, text, from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, to=["<%s>" % (self.email)] + ) 
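+        # NOTE: .xlsx spreadsheets are binary files, so the open(filename, "r") call
+        # above can raise UnicodeDecodeError (or mangle the payload) when xlsx.read()
+        # runs below; opening the file with "rb" would be the safer mode here.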
msg.attach_alternative(html, "text/html") - msg.attach(filename, xlsx.read(), 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet') + msg.attach(filename, xlsx.read(), "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet") msg.send() - + self.is_emailed = True self.save() - + logging.debug(" -> ~BB~FM~SBSent email for popularity query: %s" % self) - + class MClassifierTitle(mongo.Document): user_id = mongo.IntField() @@ -92,68 +92,69 @@ class MClassifierTitle(mongo.Document): title = mongo.StringField(max_length=255) score = mongo.IntField() creation_date = mongo.DateTimeField() - + meta = { - 'collection': 'classifier_title', - 'indexes': [('user_id', 'feed_id'), 'feed_id', ('user_id', 'social_user_id'), 'social_user_id'], - 'allow_inheritance': False, + "collection": "classifier_title", + "indexes": [("user_id", "feed_id"), "feed_id", ("user_id", "social_user_id"), "social_user_id"], + "allow_inheritance": False, } - + def __str__(self): user = User.objects.get(pk=self.user_id) return "%s - %s/%s: (%s) %s" % (user, self.feed_id, self.social_user_id, self.score, self.title[:30]) - - + + class MClassifierAuthor(mongo.Document): - user_id = mongo.IntField(unique_with=('feed_id', 'social_user_id', 'author')) + user_id = mongo.IntField(unique_with=("feed_id", "social_user_id", "author")) feed_id = mongo.IntField() social_user_id = mongo.IntField() author = mongo.StringField(max_length=255) score = mongo.IntField() creation_date = mongo.DateTimeField() - + meta = { - 'collection': 'classifier_author', - 'indexes': [('user_id', 'feed_id'), 'feed_id', ('user_id', 'social_user_id'), 'social_user_id'], - 'allow_inheritance': False, + "collection": "classifier_author", + "indexes": [("user_id", "feed_id"), "feed_id", ("user_id", "social_user_id"), "social_user_id"], + "allow_inheritance": False, } - + def __str__(self): user = User.objects.get(pk=self.user_id) return "%s - %s/%s: (%s) %s" % (user, self.feed_id, self.social_user_id, self.score, self.author[:30]) + class MClassifierTag(mongo.Document): - user_id = mongo.IntField(unique_with=('feed_id', 'social_user_id', 'tag')) + user_id = mongo.IntField(unique_with=("feed_id", "social_user_id", "tag")) feed_id = mongo.IntField() social_user_id = mongo.IntField() tag = mongo.StringField(max_length=255) score = mongo.IntField() creation_date = mongo.DateTimeField() - + meta = { - 'collection': 'classifier_tag', - 'indexes': [('user_id', 'feed_id'), 'feed_id', ('user_id', 'social_user_id'), 'social_user_id'], - 'allow_inheritance': False, + "collection": "classifier_tag", + "indexes": [("user_id", "feed_id"), "feed_id", ("user_id", "social_user_id"), "social_user_id"], + "allow_inheritance": False, } - + def __str__(self): user = User.objects.get(pk=self.user_id) return "%s - %s/%s: (%s) %s" % (user, self.feed_id, self.social_user_id, self.score, self.tag[:30]) - + class MClassifierFeed(mongo.Document): - user_id = mongo.IntField(unique_with=('feed_id', 'social_user_id')) + user_id = mongo.IntField(unique_with=("feed_id", "social_user_id")) feed_id = mongo.IntField() social_user_id = mongo.IntField() score = mongo.IntField() creation_date = mongo.DateTimeField() - + meta = { - 'collection': 'classifier_feed', - 'indexes': [('user_id', 'feed_id'), 'feed_id', ('user_id', 'social_user_id'), 'social_user_id'], - 'allow_inheritance': False, + "collection": "classifier_feed", + "indexes": [("user_id", "feed_id"), "feed_id", ("user_id", "social_user_id"), "social_user_id"], + "allow_inheritance": False, } - + def __str__(self): 
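+        # A classifier row points at either a feed (feed_id) or a blurblog
+        # (social_user_id); the lookup below resolves whichever one is set so the
+        # repr can name it.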
user = User.objects.get(pk=self.user_id) if self.feed_id: @@ -161,94 +162,105 @@ def __str__(self): else: feed = User.objects.get(pk=self.social_user_id) return "%s - %s/%s: (%s) %s" % (user, self.feed_id, self.social_user_id, self.score, feed) - + def compute_story_score(story, classifier_titles, classifier_authors, classifier_tags, classifier_feeds): intelligence = { - 'feed': apply_classifier_feeds(classifier_feeds, story['story_feed_id']), - 'author': apply_classifier_authors(classifier_authors, story), - 'tags': apply_classifier_tags(classifier_tags, story), - 'title': apply_classifier_titles(classifier_titles, story), + "feed": apply_classifier_feeds(classifier_feeds, story["story_feed_id"]), + "author": apply_classifier_authors(classifier_authors, story), + "tags": apply_classifier_tags(classifier_tags, story), + "title": apply_classifier_titles(classifier_titles, story), } score = 0 - score_max = max(intelligence['title'], - intelligence['author'], - intelligence['tags']) - score_min = min(intelligence['title'], - intelligence['author'], - intelligence['tags']) + score_max = max(intelligence["title"], intelligence["author"], intelligence["tags"]) + score_min = min(intelligence["title"], intelligence["author"], intelligence["tags"]) if score_max > 0: score = score_max elif score_min < 0: score = score_min if score == 0: - score = intelligence['feed'] - + score = intelligence["feed"] + return score - + + def apply_classifier_titles(classifiers, story): score = 0 for classifier in classifiers: - if classifier.feed_id != story['story_feed_id']: + if classifier.feed_id != story["story_feed_id"]: continue - if classifier.title.lower() in story['story_title'].lower(): + if classifier.title.lower() in story["story_title"].lower(): # print 'Titles: (%s) %s -- %s' % (classifier.title in story['story_title'], classifier.title, story['story_title']) score = classifier.score - if score > 0: return score + if score > 0: + return score return score - + + def apply_classifier_authors(classifiers, story): score = 0 for classifier in classifiers: - if classifier.feed_id != story['story_feed_id']: + if classifier.feed_id != story["story_feed_id"]: continue - if story.get('story_authors') and classifier.author == story.get('story_authors'): + if story.get("story_authors") and classifier.author == story.get("story_authors"): # print 'Authors: %s -- %s' % (classifier.author, story['story_authors']) score = classifier.score - if score > 0: return classifier.score + if score > 0: + return classifier.score return score - + + def apply_classifier_tags(classifiers, story): score = 0 for classifier in classifiers: - if classifier.feed_id != story['story_feed_id']: + if classifier.feed_id != story["story_feed_id"]: continue - if story['story_tags'] and classifier.tag in story['story_tags']: + if story["story_tags"] and classifier.tag in story["story_tags"]: # print 'Tags: (%s-%s) %s -- %s' % (classifier.tag in story['story_tags'], classifier.score, classifier.tag, story['story_tags']) score = classifier.score - if score > 0: return classifier.score + if score > 0: + return classifier.score return score - + + def apply_classifier_feeds(classifiers, feed, social_user_ids=None): - if not feed and not social_user_ids: return 0 + if not feed and not social_user_ids: + return 0 feed_id = None if feed: feed_id = feed if isinstance(feed, int) else feed.pk - + if social_user_ids and not isinstance(social_user_ids, list): social_user_ids = [social_user_ids] - + for classifier in classifiers: if classifier.feed_id == 
feed_id: # print 'Feeds: %s -- %s' % (classifier.feed_id, feed.pk) return classifier.score - if (social_user_ids and not classifier.feed_id and - classifier.social_user_id in social_user_ids): + if social_user_ids and not classifier.feed_id and classifier.social_user_id in social_user_ids: return classifier.score return 0 - -def get_classifiers_for_user(user, feed_id=None, social_user_id=None, classifier_feeds=None, classifier_authors=None, - classifier_titles=None, classifier_tags=None): + + +def get_classifiers_for_user( + user, + feed_id=None, + social_user_id=None, + classifier_feeds=None, + classifier_authors=None, + classifier_titles=None, + classifier_tags=None, +): params = dict(user_id=user.pk) if isinstance(feed_id, list): - params['feed_id__in'] = feed_id + params["feed_id__in"] = feed_id elif feed_id: - params['feed_id'] = feed_id + params["feed_id"] = feed_id if social_user_id: if isinstance(social_user_id, str): - social_user_id = int(social_user_id.replace('social:', '')) - params['social_user_id'] = social_user_id + social_user_id = int(social_user_id.replace("social:", "")) + params["social_user_id"] = social_user_id if classifier_authors is None: classifier_authors = list(MClassifierAuthor.objects(**params)) @@ -258,49 +270,56 @@ def get_classifiers_for_user(user, feed_id=None, social_user_id=None, classifier classifier_tags = list(MClassifierTag.objects(**params)) if classifier_feeds is None: if not social_user_id and feed_id: - params['social_user_id'] = 0 + params["social_user_id"] = 0 classifier_feeds = list(MClassifierFeed.objects(**params)) - + feeds = [] for f in classifier_feeds: if f.social_user_id and not f.feed_id: - feeds.append(('social:%s' % f.social_user_id, f.score)) + feeds.append(("social:%s" % f.social_user_id, f.score)) else: feeds.append((f.feed_id, f.score)) - + payload = { - 'feeds': dict(feeds), - 'authors': dict([(a.author, a.score) for a in classifier_authors]), - 'titles': dict([(t.title, t.score) for t in classifier_titles]), - 'tags': dict([(t.tag, t.score) for t in classifier_tags]), + "feeds": dict(feeds), + "authors": dict([(a.author, a.score) for a in classifier_authors]), + "titles": dict([(t.title, t.score) for t in classifier_titles]), + "tags": dict([(t.tag, t.score) for t in classifier_tags]), } - + return payload - -def sort_classifiers_by_feed(user, feed_ids=None, - classifier_feeds=None, - classifier_authors=None, - classifier_titles=None, - classifier_tags=None): + + +def sort_classifiers_by_feed( + user, + feed_ids=None, + classifier_feeds=None, + classifier_authors=None, + classifier_titles=None, + classifier_tags=None, +): def sort_by_feed(classifiers): feed_classifiers = defaultdict(list) for classifier in classifiers: feed_classifiers[classifier.feed_id].append(classifier) return feed_classifiers - + classifiers = {} if feed_ids: - classifier_feeds = sort_by_feed(classifier_feeds) + classifier_feeds = sort_by_feed(classifier_feeds) classifier_authors = sort_by_feed(classifier_authors) - classifier_titles = sort_by_feed(classifier_titles) - classifier_tags = sort_by_feed(classifier_tags) + classifier_titles = sort_by_feed(classifier_titles) + classifier_tags = sort_by_feed(classifier_tags) for feed_id in feed_ids: - classifiers[feed_id] = get_classifiers_for_user(user, feed_id=feed_id, - classifier_feeds=classifier_feeds[feed_id], - classifier_authors=classifier_authors[feed_id], - classifier_titles=classifier_titles[feed_id], - classifier_tags=classifier_tags[feed_id]) - + classifiers[feed_id] = get_classifiers_for_user( + 
user, + feed_id=feed_id, + classifier_feeds=classifier_feeds[feed_id], + classifier_authors=classifier_authors[feed_id], + classifier_titles=classifier_titles[feed_id], + classifier_tags=classifier_tags[feed_id], + ) + return classifiers diff --git a/apps/analyzer/phrase_filter.py b/apps/analyzer/phrase_filter.py index 70fe77c01f..fe025f84bb 100644 --- a/apps/analyzer/phrase_filter.py +++ b/apps/analyzer/phrase_filter.py @@ -1,39 +1,39 @@ import re from pprint import pprint + class PhraseFilter: - def __init__(self): self.phrases = {} - + def run(self, text, storyid): chunks = self.chunk(text) self.count_phrases(chunks, storyid) - + def print_phrases(self): pprint(self.phrases) - + def get_phrases(self): return self.phrases.keys() - + # =========== # = Chunker = # =========== - + def chunk(self, text): - chunks = [t.strip() for t in re.split('[^a-zA-Z-]+', text) if t] + chunks = [t.strip() for t in re.split("[^a-zA-Z-]+", text) if t] # chunks = self._lowercase(chunks) return chunks - + def _lowercase(self, chunks): return [c.lower() for c in chunks] - + # ================== # = Phrase Counter = # ================== - + def count_phrases(self, chunks, storyid): - for l in range(1, len(chunks)+1): + for l in range(1, len(chunks) + 1): combinations = self._get_combinations(chunks, l) # print "Combinations: %s" % combinations for phrase in combinations: @@ -41,23 +41,23 @@ def count_phrases(self, chunks, storyid): self.phrases[phrase] = [] if storyid not in self.phrases[phrase]: self.phrases[phrase].append(storyid) - + def _get_combinations(self, chunks, length): combinations = [] for i, chunk in enumerate(chunks): # 0,1,2,3,4,5,6 = 01 12 23 34 45 56 combination = [] for l in range(length): - if i+l < len(chunks): + if i + l < len(chunks): # print i, l, chunks[i+l], len(chunks) - combination.append(chunks[i+l]) - combinations.append(' '.join(combination)) + combination.append(chunks[i + l]) + combinations.append(" ".join(combination)) return combinations - + # ================= # = Phrase Paring = # ================= - + def pare_phrases(self): # Kill singles for phrase, counts in self.phrases.items(): @@ -67,27 +67,32 @@ def pare_phrases(self): if len(phrase) < 4: del self.phrases[phrase] continue - + # Kill repeats for phrase in self.phrases.keys(): for phrase2 in self.phrases.keys(): - if phrase in self.phrases and len(phrase2) > len(phrase) and phrase in phrase2 and phrase != phrase2: + if ( + phrase in self.phrases + and len(phrase2) > len(phrase) + and phrase in phrase2 + and phrase != phrase2 + ): del self.phrases[phrase] - -if __name__ == '__main__': + + +if __name__ == "__main__": phrasefilter = PhraseFilter() - phrasefilter.run('House of the Day: 123 Atlantic Ave. #3', 1) - phrasefilter.run('House of the Day: 456 Plankton St. #3', 4) - phrasefilter.run('Coop of the Day: 321 Pacific St.', 2) - phrasefilter.run('Streetlevel: 393 Pacific St.', 11) - phrasefilter.run('Coop of the Day: 456 Jefferson Ave.', 3) - phrasefilter.run('Extra, Extra', 5) - phrasefilter.run('Extra, Extra', 6) - phrasefilter.run('Early Addition', 7) - phrasefilter.run('Early Addition', 8) - phrasefilter.run('Development Watch', 9) - phrasefilter.run('Streetlevel', 10) - + phrasefilter.run("House of the Day: 123 Atlantic Ave. #3", 1) + phrasefilter.run("House of the Day: 456 Plankton St. 
#3", 4) + phrasefilter.run("Coop of the Day: 321 Pacific St.", 2) + phrasefilter.run("Streetlevel: 393 Pacific St.", 11) + phrasefilter.run("Coop of the Day: 456 Jefferson Ave.", 3) + phrasefilter.run("Extra, Extra", 5) + phrasefilter.run("Extra, Extra", 6) + phrasefilter.run("Early Addition", 7) + phrasefilter.run("Early Addition", 8) + phrasefilter.run("Development Watch", 9) + phrasefilter.run("Streetlevel", 10) + phrasefilter.pare_phrases() phrasefilter.print_phrases() - \ No newline at end of file diff --git a/apps/analyzer/tasks.py b/apps/analyzer/tasks.py index c41736d12d..5741e15a90 100644 --- a/apps/analyzer/tasks.py +++ b/apps/analyzer/tasks.py @@ -1,12 +1,12 @@ from newsblur_web.celeryapp import app from utils import log as logging + @app.task() def EmailPopularityQuery(pk): from apps.analyzer.models import MPopularityQuery - + query = MPopularityQuery.objects.get(pk=pk) logging.debug(" -> ~BB~FCRunning popularity query: ~SB%s" % query) - + query.send_email() - diff --git a/apps/analyzer/tests.py b/apps/analyzer/tests.py index a69739247d..ac1f3a1cf0 100644 --- a/apps/analyzer/tests.py +++ b/apps/analyzer/tests.py @@ -2,6 +2,7 @@ from apps.rss_feeds.models import MStory from django.test import TestCase from django.core import management + # from apps.analyzer.classifier import FisherClassifier import nltk from itertools import groupby @@ -11,16 +12,17 @@ class QuadgramCollocationFinder(nltk.collocations.AbstractCollocationFinder): - """A tool for the finding and ranking of quadgram collocations or other association measures. + """A tool for the finding and ranking of quadgram collocations or other association measures. It is often useful to use from_words() rather thanconstructing an instance directly. """ + def __init__(self, word_fd, quadgram_fd, trigram_fd, bigram_fd, wildcard_fd): """Construct a TrigramCollocationFinder, given FreqDists for appearances of words, bigrams, two words with any word between them,and trigrams.""" nltk.collocations.AbstractCollocationFinder.__init__(self, word_fd, quadgram_fd) self.trigram_fd = trigram_fd self.bigram_fd = bigram_fd self.wildcard_fd = wildcard_fd - + @classmethod def from_words(cls, words): wfd = nltk.probability.FreqDist() @@ -28,20 +30,20 @@ def from_words(cls, words): tfd = nltk.probability.FreqDist() bfd = nltk.probability.FreqDist() wildfd = nltk.probability.FreqDist() - - for w1, w2, w3 ,w4 in nltk.util.ingrams(words, 4, pad_right=True): + + for w1, w2, w3, w4 in nltk.util.ingrams(words, 4, pad_right=True): wfd.inc(w1) if w4 is None: continue else: - qfd.inc((w1,w2,w3,w4)) - bfd.inc((w1,w2)) - tfd.inc((w1,w2,w3)) - wildfd.inc((w1,w3,w4)) - wildfd.inc((w1,w2,w4)) - + qfd.inc((w1, w2, w3, w4)) + bfd.inc((w1, w2)) + tfd.inc((w1, w2, w3)) + wildfd.inc((w1, w3, w4)) + wildfd.inc((w1, w2, w4)) + return cls(wfd, qfd, tfd, bfd, wildfd) - + def score_ngram(self, score_fn, w1, w2, w3, w4): n_all = self.word_fd.N() n_iiii = self.ngram_fd[(w1, w2, w3, w4)] @@ -59,63 +61,78 @@ def score_ngram(self, score_fn, w1, w2, w3, w4): n_xixi = self.trigram_fd[(w2, w3)] n_xxii = self.trigram_fd[(w3, w4)] n_xxxi = self.trigram_fd[(w3, w4)] - return score_fn(n_iiii, - (n_iiix, n_iixi, n_ixii, n_xiii), - (n_iixx, n_ixix, n_ixxi, n_ixxx), - (n_xiix, n_xixi, n_xxii, n_xxxi), - n_all) + return score_fn( + n_iiii, + (n_iiix, n_iixi, n_ixii, n_xiii), + (n_iixx, n_ixix, n_ixxi, n_ixxx), + (n_xiix, n_xixi, n_xxii, n_xxxi), + n_all, + ) + - class CollocationTest(TestCase): - - fixtures = ['brownstoner.json'] - + fixtures = ["brownstoner.json"] + def setUp(self): 
self.client = Client() - + def test_bigrams(self): # bigram_measures = nltk.collocations.BigramAssocMeasures() trigram_measures = nltk.collocations.TrigramAssocMeasures() tokens = [ - 'Co-op', 'of', 'the', 'day', - 'House', 'of', 'the', 'day', - 'Condo', 'of', 'the', 'day', - 'Development', 'Watch', - 'Co-op', 'of', 'the', 'day', + "Co-op", + "of", + "the", + "day", + "House", + "of", + "the", + "day", + "Condo", + "of", + "the", + "day", + "Development", + "Watch", + "Co-op", + "of", + "the", + "day", ] finder = nltk.collocations.TrigramCollocationFinder.from_words(tokens) - + finder.apply_freq_filter(2) - + # return the 10 n-grams with the highest PMI print(finder.nbest(trigram_measures.pmi, 10)) titles = [ - 'Co-op of the day', - 'Condo of the day', - 'Co-op of the day', - 'House of the day', - 'Development Watch', - 'Streetlevel', + "Co-op of the day", + "Condo of the day", + "Co-op of the day", + "House of the day", + "Development Watch", + "Streetlevel", ] - tokens = nltk.tokenize.word(' '.join(titles)) + tokens = nltk.tokenize.word(" ".join(titles)) ngrams = nltk.ngrams(tokens, 4) d = [key for key, group in groupby(sorted(ngrams)) if len(list(group)) >= 2] print(d) + class ClassifierTest(TestCase): - - fixtures = ['classifiers.json', 'brownstoner.json'] - + fixtures = ["classifiers.json", "brownstoner.json"] + def setUp(self): self.client = Client() - # + + # # def test_filter(self): # user = User.objects.all() # feed = Feed.objects.all() - # + # # management.call_command('loaddata', 'brownstoner.json', verbosity=0) # response = self.client.get('/reader/refresh_feed', { "feed_id": 1, "force": True }) # management.call_command('loaddata', 'brownstoner2.json', verbosity=0) @@ -124,28 +141,32 @@ def setUp(self): # response = self.client.get('/reader/refresh_feed', { "feed_id": 4, "force": True }) # management.call_command('loaddata', 'gothamist2.json', verbosity=0) # response = self.client.get('/reader/refresh_feed', { "feed_id": 4, "force": True }) - # + # # stories = Story.objects.filter(story_feed=feed[1]).order_by('-story_date')[:100] - # + # # phrasefilter = PhraseFilter() # for story in stories: # # print story.story_title, story.id # phrasefilter.run(story.story_title, story.id) - # + # # phrasefilter.pare_phrases() # phrasefilter.print_phrases() - # + # def test_train(self): # user = User.objects.all() # feed = Feed.objects.all() - - management.call_command('loaddata', 'brownstoner.json', verbosity=0, commit=False, skip_checks=False) - management.call_command('refresh_feed', force=1, feed=1, single_threaded=True, daemonize=False, skip_checks=False) - management.call_command('loaddata', 'brownstoner2.json', verbosity=0, commit=False, skip_checks=False) - management.call_command('refresh_feed', force=1, feed=1, single_threaded=True, daemonize=False, skip_checks=False) - + + management.call_command("loaddata", "brownstoner.json", verbosity=0, commit=False, skip_checks=False) + management.call_command( + "refresh_feed", force=1, feed=1, single_threaded=True, daemonize=False, skip_checks=False + ) + management.call_command("loaddata", "brownstoner2.json", verbosity=0, commit=False, skip_checks=False) + management.call_command( + "refresh_feed", force=1, feed=1, single_threaded=True, daemonize=False, skip_checks=False + ) + stories = MStory.objects(story_feed_id=1)[:53] - + phrasefilter = PhraseFilter() for story in stories: # print story.story_title, story.id @@ -154,46 +175,45 @@ def test_train(self): phrasefilter.pare_phrases() phrases = phrasefilter.get_phrases() print(phrases) - 
+ tokenizer = Tokenizer(phrases) - classifier = Bayes(tokenizer) # FisherClassifier(user[0], feed[0], phrases) - - classifier.train('good', 'House of the Day: 393 Pacific St.') - classifier.train('good', 'House of the Day: 393 Pacific St.') - classifier.train('good', 'Condo of the Day: 393 Pacific St.') - classifier.train('good', 'Co-op of the Day: 393 Pacific St. #3') - classifier.train('good', 'Co-op of the Day: 393 Pacific St. #3') - classifier.train('good', 'Development Watch: 393 Pacific St. #3') - classifier.train('bad', 'Development Watch: 393 Pacific St. #3') - classifier.train('bad', 'Development Watch: 393 Pacific St. #3') - classifier.train('bad', 'Development Watch: 393 Pacific St. #3') - classifier.train('bad', 'Streetlevel: 393 Pacific St. #3') - - guess = dict(classifier.guess('Co-op of the Day: 413 Atlantic')) - self.assertTrue(guess['good'] > .99) - self.assertTrue('bad' not in guess) - - guess = dict(classifier.guess('House of the Day: 413 Atlantic')) - self.assertTrue(guess['good'] > .99) - self.assertTrue('bad' not in guess) - - guess = dict(classifier.guess('Development Watch: Yatta')) - self.assertTrue(guess['bad'] > .7) - self.assertTrue(guess['good'] < .3) - - guess = dict(classifier.guess('Development Watch: 393 Pacific St.')) - self.assertTrue(guess['bad'] > .7) - self.assertTrue(guess['good'] < .3) - - guess = dict(classifier.guess('Streetlevel: 123 Carlton St.')) - self.assertTrue(guess['bad'] > .99) - self.assertTrue('good' not in guess) - - guess = classifier.guess('Extra, Extra') - self.assertTrue('bad' not in guess) - self.assertTrue('good' not in guess) - - guess = classifier.guess('Nothing doing: 393 Pacific St.') - self.assertTrue('bad' not in guess) - self.assertTrue('good' not in guess) - \ No newline at end of file + classifier = Bayes(tokenizer) # FisherClassifier(user[0], feed[0], phrases) + + classifier.train("good", "House of the Day: 393 Pacific St.") + classifier.train("good", "House of the Day: 393 Pacific St.") + classifier.train("good", "Condo of the Day: 393 Pacific St.") + classifier.train("good", "Co-op of the Day: 393 Pacific St. #3") + classifier.train("good", "Co-op of the Day: 393 Pacific St. #3") + classifier.train("good", "Development Watch: 393 Pacific St. #3") + classifier.train("bad", "Development Watch: 393 Pacific St. #3") + classifier.train("bad", "Development Watch: 393 Pacific St. #3") + classifier.train("bad", "Development Watch: 393 Pacific St. #3") + classifier.train("bad", "Streetlevel: 393 Pacific St. 
#3") + + guess = dict(classifier.guess("Co-op of the Day: 413 Atlantic")) + self.assertTrue(guess["good"] > 0.99) + self.assertTrue("bad" not in guess) + + guess = dict(classifier.guess("House of the Day: 413 Atlantic")) + self.assertTrue(guess["good"] > 0.99) + self.assertTrue("bad" not in guess) + + guess = dict(classifier.guess("Development Watch: Yatta")) + self.assertTrue(guess["bad"] > 0.7) + self.assertTrue(guess["good"] < 0.3) + + guess = dict(classifier.guess("Development Watch: 393 Pacific St.")) + self.assertTrue(guess["bad"] > 0.7) + self.assertTrue(guess["good"] < 0.3) + + guess = dict(classifier.guess("Streetlevel: 123 Carlton St.")) + self.assertTrue(guess["bad"] > 0.99) + self.assertTrue("good" not in guess) + + guess = classifier.guess("Extra, Extra") + self.assertTrue("bad" not in guess) + self.assertTrue("good" not in guess) + + guess = classifier.guess("Nothing doing: 393 Pacific St.") + self.assertTrue("bad" not in guess) + self.assertTrue("good" not in guess) diff --git a/apps/analyzer/tfidf.py b/apps/analyzer/tfidf.py index 08fe0e4f0e..1bb015efe6 100755 --- a/apps/analyzer/tfidf.py +++ b/apps/analyzer/tfidf.py @@ -9,6 +9,7 @@ import sys import os + class tfidf: def __init__(self): self.weighted = False @@ -19,7 +20,7 @@ def addDocument(self, doc_name, list_of_words): # building a dictionary doc_dict = {} for w in list_of_words: - doc_dict[w] = doc_dict.get(w, 0.) + 1.0 + doc_dict[w] = doc_dict.get(w, 0.0) + 1.0 self.corpus_dict[w] = self.corpus_dict.get(w, 0.0) + 1.0 # normalizing the dictionary @@ -53,4 +54,4 @@ def similarities(self, list_of_words): score += (query_dict[k] / self.corpus_dict[k]) + (doc_dict[k] / self.corpus_dict[k]) sims.append([doc[0], score]) - return sims \ No newline at end of file + return sims diff --git a/apps/analyzer/tokenizer.py b/apps/analyzer/tokenizer.py index 83885398b6..0ca40f7e00 100644 --- a/apps/analyzer/tokenizer.py +++ b/apps/analyzer/tokenizer.py @@ -1,28 +1,30 @@ import re + class Tokenizer: """A simple regex-based whitespace tokenizer. It expects a string and can return all tokens lower-cased or in their existing case. 
""" - - WORD_RE = re.compile('[^a-zA-Z-]+') + + WORD_RE = re.compile("[^a-zA-Z-]+") def __init__(self, phrases, lower=False): self.phrases = phrases self.lower = lower - + def tokenize(self, doc): print(doc) - formatted_doc = ' '.join(self.WORD_RE.split(doc)) + formatted_doc = " ".join(self.WORD_RE.split(doc)) print(formatted_doc) for phrase in self.phrases: if phrase in formatted_doc: yield phrase - -if __name__ == '__main__': - phrases = ['Extra Extra', 'Streetlevel', 'House of the Day'] + + +if __name__ == "__main__": + phrases = ["Extra Extra", "Streetlevel", "House of the Day"] tokenizer = Tokenizer(phrases) - doc = 'Extra, Extra' - tokenizer.tokenize(doc) \ No newline at end of file + doc = "Extra, Extra" + tokenizer.tokenize(doc) diff --git a/apps/analyzer/urls.py b/apps/analyzer/urls.py index 3812755628..ed6580c67e 100644 --- a/apps/analyzer/urls.py +++ b/apps/analyzer/urls.py @@ -2,8 +2,8 @@ from apps.analyzer import views urlpatterns = [ - url(r'^$', views.index), - url(r'^save/?', views.save_classifier), - url(r'^popularity/?', views.popularity_query), - url(r'^(?P\d+)', views.get_classifiers_feed), + url(r"^$", views.index), + url(r"^save/?", views.save_classifier), + url(r"^popularity/?", views.popularity_query), + url(r"^(?P\d+)", views.get_classifiers_feed), ] diff --git a/apps/analyzer/views.py b/apps/analyzer/views.py index d06b72625b..8ea2825658 100644 --- a/apps/analyzer/views.py +++ b/apps/analyzer/views.py @@ -15,34 +15,38 @@ from utils.user_functions import get_user from utils.user_functions import ajax_login_required + def index(requst): pass - + + @require_POST @ajax_login_required @json.json_view def save_classifier(request): post = request.POST - feed_id = post['feed_id'] + feed_id = post["feed_id"] feed = None social_user_id = None - if feed_id.startswith('social:'): - social_user_id = int(feed_id.replace('social:', '')) + if feed_id.startswith("social:"): + social_user_id = int(feed_id.replace("social:", "")) feed_id = None else: feed_id = int(feed_id) feed = get_object_or_404(Feed, pk=feed_id) code = 0 - message = 'OK' + message = "OK" payload = {} logging.user(request, "~FGSaving classifier: ~SB%s~SN ~FW%s" % (feed, post)) - + # Mark subscription as dirty, so unread counts can be recalculated usersub = None socialsub = None if social_user_id: - socialsub = MSocialSubscription.objects.get(user_id=request.user.pk, subscription_user_id=social_user_id) + socialsub = MSocialSubscription.objects.get( + user_id=request.user.pk, subscription_user_id=social_user_id + ) if not socialsub.needs_unread_recalc: socialsub.needs_unread_recalc = True socialsub.save() @@ -55,31 +59,31 @@ def save_classifier(request): usersub.needs_unread_recalc = True usersub.is_trained = True usersub.save() - - + def _save_classifier(ClassifierCls, content_type): classifiers = { - 'like_'+content_type: 1, - 'dislike_'+content_type: -1, - 'remove_like_'+content_type: 0, - 'remove_dislike_'+content_type: 0, + "like_" + content_type: 1, + "dislike_" + content_type: -1, + "remove_like_" + content_type: 0, + "remove_dislike_" + content_type: 0, } for opinion, score in classifiers.items(): if opinion in post: post_contents = post.getlist(opinion) for post_content in post_contents: - if not post_content: continue + if not post_content: + continue classifier_dict = { - 'user_id': request.user.pk, - 'feed_id': feed_id or 0, - 'social_user_id': social_user_id or 0, + "user_id": request.user.pk, + "feed_id": feed_id or 0, + "social_user_id": social_user_id or 0, } - if content_type in ('author', 'tag', 
'title'): + if content_type in ("author", "tag", "title"): max_length = ClassifierCls._fields[content_type].max_length classifier_dict.update({content_type: post_content[:max_length]}) - if content_type == 'feed': - if not post_content.startswith('social:'): - classifier_dict['feed_id'] = post_content + if content_type == "feed": + if not post_content.startswith("social:"): + classifier_dict["feed_id"] = post_content try: classifier = ClassifierCls.objects.get(**classifier_dict) except ClassifierCls.DoesNotExist: @@ -94,59 +98,77 @@ def _save_classifier(ClassifierCls, content_type): classifier.delete() elif classifier.score != score: if score == 0: - if ((classifier.score == 1 and opinion.startswith('remove_like')) - or (classifier.score == -1 and opinion.startswith('remove_dislike'))): + if (classifier.score == 1 and opinion.startswith("remove_like")) or ( + classifier.score == -1 and opinion.startswith("remove_dislike") + ): classifier.delete() else: classifier.score = score classifier.save() - - _save_classifier(MClassifierAuthor, 'author') - _save_classifier(MClassifierTag, 'tag') - _save_classifier(MClassifierTitle, 'title') - _save_classifier(MClassifierFeed, 'feed') + + _save_classifier(MClassifierAuthor, "author") + _save_classifier(MClassifierTag, "tag") + _save_classifier(MClassifierTitle, "title") + _save_classifier(MClassifierFeed, "feed") r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'feed:%s' % feed_id) + r.publish(request.user.username, "feed:%s" % feed_id) response = dict(code=code, message=message, payload=payload) return response - + + @json.json_view def get_classifiers_feed(request, feed_id): user = get_user(request) code = 0 - + payload = get_classifiers_for_user(user, feed_id=feed_id) - + response = dict(code=code, payload=payload) - + return response + def popularity_query(request): - if request.method == 'POST': + if request.method == "POST": form = PopularityQueryForm(request.POST) if form.is_valid(): - logging.user(request.user, "~BC~FRPopularity query: ~SB%s~SN requests \"~SB~FM%s~SN~FR\"" % (request.POST['email'], request.POST['query'])) - query = MPopularityQuery.objects.create(email=request.POST['email'], - query=request.POST['query']) + logging.user( + request.user, + '~BC~FRPopularity query: ~SB%s~SN requests "~SB~FM%s~SN~FR"' + % (request.POST["email"], request.POST["query"]), + ) + query = MPopularityQuery.objects.create(email=request.POST["email"], query=request.POST["query"]) query.queue_email() - - response = render(request, 'analyzer/popularity_query.xhtml', { - 'success': True, - 'popularity_query_form': form, - }) - response.set_cookie('newsblur_popularity_query', request.POST['query']) - + + response = render( + request, + "analyzer/popularity_query.xhtml", + { + "success": True, + "popularity_query_form": form, + }, + ) + response.set_cookie("newsblur_popularity_query", request.POST["query"]) + return response else: - logging.user(request.user, "~BC~FRFailed popularity query: ~SB%s~SN requests \"~SB~FM%s~SN~FR\"" % (request.POST['email'], request.POST['query'])) + logging.user( + request.user, + '~BC~FRFailed popularity query: ~SB%s~SN requests "~SB~FM%s~SN~FR"' + % (request.POST["email"], request.POST["query"]), + ) else: logging.user(request.user, "~BC~FRPopularity query form loading") - form = PopularityQueryForm(initial={'query': request.COOKIES.get('newsblur_popularity_query', "")}) - - response = render(request, 'analyzer/popularity_query.xhtml', { - 'popularity_query_form': form, - }) + form = 
PopularityQueryForm(initial={"query": request.COOKIES.get("newsblur_popularity_query", "")})
+
+    response = render(
+        request,
+        "analyzer/popularity_query.xhtml",
+        {
+            "popularity_query_form": form,
+        },
+    )
 
     return response
diff --git a/apps/api/tests.py b/apps/api/tests.py
index c7c4668e12..f51d798ffd 100644
--- a/apps/api/tests.py
+++ b/apps/api/tests.py
@@ -7,6 +7,7 @@
 
 from django.test import TestCase
 
+
 class SimpleTest(TestCase):
     def test_basic_addition(self):
         """
@@ -14,10 +15,12 @@ def test_basic_addition(self):
         """
         self.assertEqual(1 + 1, 2)
 
-__test__ = {"doctest": """
+
+__test__ = {
+    "doctest": """
 Another way to test that 1 + 1 is equal to 2.
 
 >>> 1 + 1 == 2
 True
-"""}
-
+"""
+}
diff --git a/apps/api/urls.py b/apps/api/urls.py
index bd4b6d43f7..3876b24e2f 100644
--- a/apps/api/urls.py
+++ b/apps/api/urls.py
@@ -2,18 +2,18 @@
 from apps.api import views
 
 urlpatterns = [
-    url(r'^logout', views.logout, name='api-logout'),
-    url(r'^login', views.login, name='api-login'),
-    url(r'^signup', views.signup, name='api-signup'),
-    url(r'^add_site_load_script/(?P<token>\w+)', views.add_site_load_script, name='api-add-site-load-script'),
-    url(r'^add_site/(?P<token>\w+)', views.add_site, name='api-add-site'),
-    url(r'^add_url/(?P<token>\w+)', views.add_site, name='api-add-site'),
-    url(r'^add_site/?$', views.add_site_authed, name='api-add-site-authed'),
-    url(r'^add_url/?$', views.add_site_authed, name='api-add-site-authed'),
-    url(r'^check_share_on_site/(?P<token>\w+)', views.check_share_on_site, name='api-check-share-on-site'),
-    url(r'^share_story/(?P<token>\w+)', views.share_story, name='api-share-story'),
-    url(r'^save_story/(?P<token>\w+)', views.save_story, name='api-save-story'),
-    url(r'^share_story/?$', views.share_story),
-    url(r'^save_story/?$', views.save_story),
-    url(r'^ip_addresses/?$', views.ip_addresses),
+    url(r"^logout", views.logout, name="api-logout"),
+    url(r"^login", views.login, name="api-login"),
+    url(r"^signup", views.signup, name="api-signup"),
+    url(r"^add_site_load_script/(?P<token>\w+)", views.add_site_load_script, name="api-add-site-load-script"),
+    url(r"^add_site/(?P<token>\w+)", views.add_site, name="api-add-site"),
+    url(r"^add_url/(?P<token>\w+)", views.add_site, name="api-add-site"),
+    url(r"^add_site/?$", views.add_site_authed, name="api-add-site-authed"),
+    url(r"^add_url/?$", views.add_site_authed, name="api-add-site-authed"),
+    url(r"^check_share_on_site/(?P<token>\w+)", views.check_share_on_site, name="api-check-share-on-site"),
+    url(r"^share_story/(?P<token>\w+)", views.share_story, name="api-share-story"),
+    url(r"^save_story/(?P<token>\w+)", views.save_story, name="api-save-story"),
+    url(r"^share_story/?$", views.share_story),
+    url(r"^save_story/?$", views.save_story),
+    url(r"^ip_addresses/?$", views.ip_addresses),
 ]
diff --git a/apps/api/views.py b/apps/api/views.py
index e092301138..ad89693bc2 100644
--- a/apps/api/views.py
+++ b/apps/api/views.py
@@ -29,10 +29,10 @@ def login(request):
     code = -1
     errors = None
-    user_agent = request.environ.get('HTTP_USER_AGENT', '')
-    ip = request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META['REMOTE_ADDR']
+    user_agent = request.environ.get("HTTP_USER_AGENT", "")
+    ip = request.META.get("HTTP_X_FORWARDED_FOR", None) or request.META["REMOTE_ADDR"]
 
-    if not user_agent or user_agent.lower() in ['nativehost']:
+    if not user_agent or user_agent.lower() in ["nativehost"]:
         errors = dict(user_agent="You must set a user agent to login.")
         logging.user(request, "~FG~BB~SK~FRBlocked ~FGAPI Login~SN~FW: %s / %s" % (user_agent, ip))
     elif request.method == "POST":
@@ -40,19 +40,20 @@ def 
login(request): if form.errors: errors = form.errors if form.is_valid(): - login_user(request, form.get_user(), backend='django.contrib.auth.backends.ModelBackend') + login_user(request, form.get_user(), backend="django.contrib.auth.backends.ModelBackend") logging.user(request, "~FG~BB~SKAPI Login~SN~FW: %s / %s" % (user_agent, ip)) code = 1 else: errors = dict(method="Invalid method. Use POST. You used %s" % request.method) - + return dict(code=code, errors=errors) - + + @json.json_view def signup(request): code = -1 errors = None - ip = request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META['REMOTE_ADDR'] + ip = request.META.get("HTTP_X_FORWARDED_FOR", None) or request.META["REMOTE_ADDR"] if request.method == "POST": form = SignupForm(data=request.POST) @@ -61,48 +62,47 @@ def signup(request): if form.is_valid(): try: new_user = form.save() - login_user(request, new_user, backend='django.contrib.auth.backends.ModelBackend') + login_user(request, new_user, backend="django.contrib.auth.backends.ModelBackend") logging.user(request, "~FG~SB~BBAPI NEW SIGNUP: ~FW%s / %s" % (new_user.email, ip)) code = 1 except forms.ValidationError as e: errors = [e.args[0]] else: errors = dict(method="Invalid method. Use POST. You used %s" % request.method) - return dict(code=code, errors=errors) - + + @json.json_view def logout(request): code = 1 logging.user(request, "~FG~BBAPI Logout~FW") logout_user(request) - + return dict(code=code) + def add_site_load_script(request, token): code = 0 usf = None profile = None user_profile = None starred_counts = {} - - def image_base64(image_name, path='icons/circular/'): - image_file = open(os.path.join(settings.MEDIA_ROOT, 'img/%s%s' % (path, image_name)), 'rb') - return base64.b64encode(image_file.read()).decode('utf-8') - - accept_image = image_base64('newuser_icn_setup.png') - error_image = image_base64('newuser_icn_sharewith_active.png') - new_folder_image = image_base64('g_icn_arrow_right.png') - add_image = image_base64('g_icn_expand_hover.png') + + def image_base64(image_name, path="icons/circular/"): + image_file = open(os.path.join(settings.MEDIA_ROOT, "img/%s%s" % (path, image_name)), "rb") + return base64.b64encode(image_file.read()).decode("utf-8") + + accept_image = image_base64("newuser_icn_setup.png") + error_image = image_base64("newuser_icn_sharewith_active.png") + new_folder_image = image_base64("g_icn_arrow_right.png") + add_image = image_base64("g_icn_expand_hover.png") try: profiles = Profile.objects.filter(secret_token=token) if profiles: profile = profiles[0] - usf = UserSubscriptionFolders.objects.get( - user=profile.user - ) + usf = UserSubscriptionFolders.objects.get(user=profile.user) user_profile = MSocialProfile.get_user(user_id=profile.user.pk) starred_counts = MStarredStoryCounts.user_counts(profile.user.pk) else: @@ -111,29 +111,34 @@ def image_base64(image_name, path='icons/circular/'): code = -1 except UserSubscriptionFolders.DoesNotExist: code = -1 - - return render(request, 'api/share_bookmarklet.js', { - 'code': code, - 'token': token, - 'folders': (usf and usf.folders) or [], - 'user': profile and profile.user or {}, - 'user_profile': user_profile and json.encode(user_profile.canonical()) or {}, - 'starred_counts': json.encode(starred_counts), - 'accept_image': accept_image, - 'error_image': error_image, - 'add_image': add_image, - 'new_folder_image': new_folder_image, - }, - content_type='application/javascript') + + return render( + request, + "api/share_bookmarklet.js", + { + "code": code, + "token": token, + 
"folders": (usf and usf.folders) or [], + "user": profile and profile.user or {}, + "user_profile": user_profile and json.encode(user_profile.canonical()) or {}, + "starred_counts": json.encode(starred_counts), + "accept_image": accept_image, + "error_image": error_image, + "add_image": add_image, + "new_folder_image": new_folder_image, + }, + content_type="application/javascript", + ) + def add_site(request, token): - code = 0 - get_post = getattr(request, request.method) - url = get_post.get('url') - folder = get_post.get('folder') - new_folder = get_post.get('new_folder') - callback = get_post.get('callback', '') - + code = 0 + get_post = getattr(request, request.method) + url = get_post.get("url") + folder = get_post.get("folder") + new_folder = get_post.get("new_folder") + callback = get_post.get("callback", "") + if not url: code = -1 else: @@ -144,35 +149,40 @@ def add_site(request, token): usf.add_folder(folder, new_folder) folder = new_folder code, message, us = UserSubscription.add_subscription( - user=profile.user, - feed_address=url, - folder=folder, - bookmarklet=True + user=profile.user, feed_address=url, folder=folder, bookmarklet=True ) except Profile.DoesNotExist: code = -1 - + if code > 0: - message = 'OK' - - logging.user(profile.user, "~FRAdding URL from site: ~SB%s (in %s)" % (url, folder), - request=request) - - return HttpResponse(callback + '(' + json.encode({ - 'code': code, - 'message': message, - 'usersub': us and us.feed_id, - }) + ')', content_type='text/plain') + message = "OK" + + logging.user(profile.user, "~FRAdding URL from site: ~SB%s (in %s)" % (url, folder), request=request) + + return HttpResponse( + callback + + "(" + + json.encode( + { + "code": code, + "message": message, + "usersub": us and us.feed_id, + } + ) + + ")", + content_type="text/plain", + ) + @ajax_login_required def add_site_authed(request): - code = 0 - url = request.GET['url'] - folder = request.GET['folder'] - new_folder = request.GET.get('new_folder') - callback = request.GET['callback'] - user = get_user(request) - + code = 0 + url = request.GET["url"] + folder = request.GET["folder"] + new_folder = request.GET.get("new_folder") + callback = request.GET["callback"] + user = get_user(request) + if not url: code = -1 else: @@ -181,40 +191,45 @@ def add_site_authed(request): usf.add_folder(folder, new_folder) folder = new_folder code, message, us = UserSubscription.add_subscription( - user=user, - feed_address=url, - folder=folder, - bookmarklet=True + user=user, feed_address=url, folder=folder, bookmarklet=True ) - + if code > 0: - message = 'OK' - - logging.user(user, "~FRAdding authed URL from site: ~SB%s (in %s)" % (url, folder), - request=request) - - return HttpResponse(callback + '(' + json.encode({ - 'code': code, - 'message': message, - 'usersub': us and us.feed_id, - }) + ')', content_type='text/plain') + message = "OK" + + logging.user(user, "~FRAdding authed URL from site: ~SB%s (in %s)" % (url, folder), request=request) + + return HttpResponse( + callback + + "(" + + json.encode( + { + "code": code, + "message": message, + "usersub": us and us.feed_id, + } + ) + + ")", + content_type="text/plain", + ) + def check_share_on_site(request, token): - code = 0 - story_url = request.GET['story_url'] - rss_url = request.GET.get('rss_url') - callback = request.GET['callback'] + code = 0 + story_url = request.GET["story_url"] + rss_url = request.GET.get("rss_url") + callback = request.GET["callback"] other_stories = None same_stories = None - usersub = None - message = None - user 
= None + usersub = None + message = None + user = None users = {} your_story = None same_stories = None other_stories = None previous_stories = None - + if not story_url: code = -1 else: @@ -223,7 +238,7 @@ def check_share_on_site(request, token): user = user_profile.user except Profile.DoesNotExist: code = -1 - + logging.user(request.user, "~FBFinding feed (check_share_on_site): %s" % rss_url) feed = Feed.get_feed_from_url(rss_url, create=False, fetch=False) if not feed: @@ -239,9 +254,9 @@ def check_share_on_site(request, token): logging.user(request.user, "~FBFinding feed (check_share_on_site): %s" % base_url) feed = Feed.get_feed_from_url(base_url, create=False, fetch=False) if not feed: - logging.user(request.user, "~FBFinding feed (check_share_on_site): %s" % (base_url + '/')) - feed = Feed.get_feed_from_url(base_url+'/', create=False, fetch=False) - + logging.user(request.user, "~FBFinding feed (check_share_on_site): %s" % (base_url + "/")) + feed = Feed.get_feed_from_url(base_url + "/", create=False, fetch=False) + if feed and user: try: usersub = UserSubscription.objects.filter(user=user, feed=feed) @@ -249,23 +264,27 @@ def check_share_on_site(request, token): usersub = None if user: feed_id = feed and feed.pk - your_story, same_stories, other_stories = MSharedStory.get_shared_stories_from_site(feed_id, - user_id=user.pk, story_url=story_url) - previous_stories = MSharedStory.objects.filter(user_id=user.pk).order_by('-shared_date').limit(3) - previous_stories = [{ - "user_id": story.user_id, - "story_title": story.story_title, - "comments": story.comments, - "shared_date": story.shared_date, - "relative_date": relative_timesince(story.shared_date), - "blurblog_permalink": story.blurblog_permalink(), - } for story in previous_stories] - + your_story, same_stories, other_stories = MSharedStory.get_shared_stories_from_site( + feed_id, user_id=user.pk, story_url=story_url + ) + previous_stories = MSharedStory.objects.filter(user_id=user.pk).order_by("-shared_date").limit(3) + previous_stories = [ + { + "user_id": story.user_id, + "story_title": story.story_title, + "comments": story.comments, + "shared_date": story.shared_date, + "relative_date": relative_timesince(story.shared_date), + "blurblog_permalink": story.blurblog_permalink(), + } + for story in previous_stories + ] + user_ids = set([user_profile.user.pk]) for story in same_stories: - user_ids.add(story['user_id']) + user_ids.add(story["user_id"]) for story in other_stories: - user_ids.add(story['user_id']) + user_ids.add(story["user_id"]) profiles = MSocialProfile.profiles(user_ids) for profile in profiles: @@ -273,39 +292,47 @@ def check_share_on_site(request, token): "username": profile.username, "photo_url": profile.photo_url, } - - logging.user(user, "~BM~FCChecking share from site: ~SB%s" % (story_url), - request=request) - - response = HttpResponse(callback + '(' + json.encode({ - 'code' : code, - 'message' : message, - 'feed' : feed, - 'subscribed' : bool(usersub), - 'your_story' : your_story, - 'same_stories' : same_stories, - 'other_stories' : other_stories, - 'previous_stories' : previous_stories, - 'users' : users, - }) + ')', content_type='text/plain') - response['Access-Control-Allow-Origin'] = '*' - response['Access-Control-Allow-Methods'] = 'GET' - + + logging.user(user, "~BM~FCChecking share from site: ~SB%s" % (story_url), request=request) + + response = HttpResponse( + callback + + "(" + + json.encode( + { + "code": code, + "message": message, + "feed": feed, + "subscribed": bool(usersub), + "your_story": 
your_story, + "same_stories": same_stories, + "other_stories": other_stories, + "previous_stories": previous_stories, + "users": users, + } + ) + + ")", + content_type="text/plain", + ) + response["Access-Control-Allow-Origin"] = "*" + response["Access-Control-Allow-Methods"] = "GET" + return response -@required_params('story_url') + +@required_params("story_url") def share_story(request, token=None): - code = 0 - story_url = request.POST['story_url'] - comments = request.POST.get('comments', "") - title = request.POST.get('title', None) - content = request.POST.get('content', None) - rss_url = request.POST.get('rss_url', None) - feed_id = request.POST.get('feed_id', None) or 0 - feed = None - message = None - profile = None - + code = 0 + story_url = request.POST["story_url"] + comments = request.POST.get("comments", "") + title = request.POST.get("title", None) + content = request.POST.get("content", None) + rss_url = request.POST.get("rss_url", None) + feed_id = request.POST.get("feed_id", None) or 0 + feed = None + message = None + profile = None + if request.user.is_authenticated: profile = request.user.profile else: @@ -317,14 +344,19 @@ def share_story(request, token=None): message = "Not authenticated, couldn't find user by token." else: message = "Not authenticated, no token supplied and not authenticated." - + if not profile: - return HttpResponse(json.encode({ - 'code': code, - 'message': message, - 'story': None, - }), content_type='text/plain') - + return HttpResponse( + json.encode( + { + "code": code, + "message": message, + "story": None, + } + ), + content_type="text/plain", + ) + if feed_id: feed = Feed.get_by_id(feed_id) else: @@ -336,7 +368,7 @@ def share_story(request, token=None): feed = Feed.get_feed_from_url(story_url, create=True, fetch=True) if feed: feed_id = feed.pk - + if content: content = lxml.html.fromstring(content) content.make_links_absolute(story_url) @@ -346,13 +378,15 @@ def share_story(request, token=None): importer = TextImporter(story=None, story_url=story_url, request=request, debug=settings.DEBUG) document = importer.fetch(skip_save=True, return_document=True) if not content: - content = document['content'] + content = document["content"] if not title: - title = document['title'] - - shared_story = MSharedStory.objects.filter(user_id=profile.user.pk, - story_feed_id=feed_id, - story_guid=story_url).limit(1).first() + title = document["title"] + + shared_story = ( + MSharedStory.objects.filter(user_id=profile.user.pk, story_feed_id=feed_id, story_guid=story_url) + .limit(1) + .first() + ) if not shared_story: story_db = { "story_guid": story_url, @@ -361,7 +395,6 @@ def share_story(request, token=None): "story_feed_id": feed_id, "story_content": content, "story_date": datetime.datetime.now(), - "user_id": profile.user.pk, "comments": comments, "has_comments": bool(comments), @@ -382,49 +415,57 @@ def share_story(request, token=None): shared_story.has_comments = bool(comments) shared_story.story_feed_id = feed_id shared_story.save() - logging.user(profile.user, "~BM~FY~SBUpdating~SN shared story from site: ~SB%s: %s" % (story_url, comments)) + logging.user( + profile.user, "~BM~FY~SBUpdating~SN shared story from site: ~SB%s: %s" % (story_url, comments) + ) message = "Updating shared story from site: %s: %s" % (story_url, comments) try: - socialsub = MSocialSubscription.objects.get(user_id=profile.user.pk, - subscription_user_id=profile.user.pk) + socialsub = MSocialSubscription.objects.get( + user_id=profile.user.pk, 
subscription_user_id=profile.user.pk + ) except MSocialSubscription.DoesNotExist: socialsub = None - + if socialsub: - socialsub.mark_story_ids_as_read([shared_story.story_hash], - shared_story.story_feed_id, - request=request) + socialsub.mark_story_ids_as_read( + [shared_story.story_hash], shared_story.story_feed_id, request=request + ) else: RUserStory.mark_read(profile.user.pk, shared_story.story_feed_id, shared_story.story_hash) - shared_story.publish_update_to_subscribers() - - response = HttpResponse(json.encode({ - 'code': code, - 'message': message, - 'story': shared_story, - }), content_type='text/plain') - response['Access-Control-Allow-Origin'] = '*' - response['Access-Control-Allow-Methods'] = 'POST' - + + response = HttpResponse( + json.encode( + { + "code": code, + "message": message, + "story": shared_story, + } + ), + content_type="text/plain", + ) + response["Access-Control-Allow-Origin"] = "*" + response["Access-Control-Allow-Methods"] = "POST" + return response -@required_params('story_url', 'title') + +@required_params("story_url", "title") def save_story(request, token=None): - code = 0 - story_url = request.POST['story_url'] - user_tags = request.POST.getlist('user_tags') or request.POST.getlist('user_tags[]') or [] - add_user_tag = request.POST.get('add_user_tag', None) - title = request.POST['title'] - content = request.POST.get('content', None) - rss_url = request.POST.get('rss_url', None) - user_notes = request.POST.get('user_notes', None) - feed_id = request.POST.get('feed_id', None) or 0 - feed = None - message = None - profile = None - + code = 0 + story_url = request.POST["story_url"] + user_tags = request.POST.getlist("user_tags") or request.POST.getlist("user_tags[]") or [] + add_user_tag = request.POST.get("add_user_tag", None) + title = request.POST["title"] + content = request.POST.get("content", None) + rss_url = request.POST.get("rss_url", None) + user_notes = request.POST.get("user_notes", None) + feed_id = request.POST.get("feed_id", None) or 0 + feed = None + message = None + profile = None + if request.user.is_authenticated: profile = request.user.profile else: @@ -436,14 +477,19 @@ def save_story(request, token=None): message = "Not authenticated, couldn't find user by token." else: message = "Not authenticated, no token supplied and not authenticated." 
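    # A sketch of the token fallback above (assuming, per the DoesNotExist
    # handling, that Profile stores each user's API token in a secret_token
    # field): profile = Profile.objects.get(secret_token=token). When neither
    # a session nor a valid token resolves a profile, the JSON error response
    # below is returned and no story is saved.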
- + if not profile: - return HttpResponse(json.encode({ - 'code': code, - 'message': message, - 'story': None, - }), content_type='text/plain') - + return HttpResponse( + json.encode( + { + "code": code, + "message": message, + "story": None, + } + ), + content_type="text/plain", + ) + if feed_id: feed = Feed.get_by_id(feed_id) else: @@ -455,7 +501,7 @@ def save_story(request, token=None): feed = Feed.get_feed_from_url(story_url, create=True, fetch=True) if feed: feed_id = feed.pk - + if content: content = lxml.html.fromstring(content) content.make_links_absolute(story_url) @@ -463,16 +509,18 @@ def save_story(request, token=None): else: importer = TextImporter(story=None, story_url=story_url, request=request, debug=settings.DEBUG) document = importer.fetch(skip_save=True, return_document=True) - content = document['content'] + content = document["content"] if not title: - title = document['title'] - + title = document["title"] + if add_user_tag: - user_tags = user_tags + [tag for tag in add_user_tag.split(',')] - - starred_story = MStarredStory.objects.filter(user_id=profile.user.pk, - story_feed_id=feed_id, - story_guid=story_url).limit(1).first() + user_tags = user_tags + [tag for tag in add_user_tag.split(",")] + + starred_story = ( + MStarredStory.objects.filter(user_id=profile.user.pk, story_feed_id=feed_id, story_guid=story_url) + .limit(1) + .first() + ) if not starred_story: story_db = { "story_guid": story_url, @@ -498,26 +546,34 @@ def save_story(request, token=None): starred_story.story_feed_id = feed_id starred_story.user_notes = user_notes starred_story.save() - logging.user(profile.user, "~BM~FC~SBUpdating~SN starred story from site: ~SB%s: %s" % (story_url, user_tags)) + logging.user( + profile.user, "~BM~FC~SBUpdating~SN starred story from site: ~SB%s: %s" % (story_url, user_tags) + ) message = "Updating saved story from site: %s: %s" % (story_url, user_tags) MStarredStoryCounts.schedule_count_tags_for_user(request.user.pk) - - response = HttpResponse(json.encode({ - 'code': code, - 'message': message, - 'story': starred_story, - }), content_type='text/plain') - response['Access-Control-Allow-Origin'] = '*' - response['Access-Control-Allow-Methods'] = 'POST' - + + response = HttpResponse( + json.encode( + { + "code": code, + "message": message, + "story": starred_story, + } + ), + content_type="text/plain", + ) + response["Access-Control-Allow-Origin"] = "*" + response["Access-Control-Allow-Methods"] = "POST" + return response + def ip_addresses(request): # Read local file /srv/newsblur/apps/api/ip_addresses.txt and return that - with open('/srv/newsblur/apps/api/ip_addresses.txt', 'r') as f: + with open("/srv/newsblur/apps/api/ip_addresses.txt", "r") as f: addresses = f.read() mail_admins(f"IP Addresses accessed from {request.META['REMOTE_ADDR']} by {request.user}", addresses) - return HttpResponse(addresses, content_type='text/plain') + return HttpResponse(addresses, content_type="text/plain") diff --git a/apps/categories/models.py b/apps/categories/models.py index 7afa63151a..7254c80b39 100644 --- a/apps/categories/models.py +++ b/apps/categories/models.py @@ -6,20 +6,21 @@ from utils.feed_functions import add_object_to_folder from utils import log as logging + class MCategory(mongo.Document): title = mongo.StringField() description = mongo.StringField() feed_ids = mongo.ListField(mongo.IntField()) - + meta = { - 'collection': 'category', - 'indexes': ['title'], - 'allow_inheritance': False, + "collection": "category", + "indexes": ["title"], + "allow_inheritance": 
False, } - + def __str__(self): return "%s: %s sites" % (self.title, len(self.feed_ids)) - + @classmethod def audit(cls): categories = cls.objects.all() @@ -39,28 +40,28 @@ def audit(cls): @classmethod def add(cls, title, description): return cls.objects.create(title=title, description=description) - + @classmethod def serialize(cls, category=None): categories = cls.objects.all() if category: categories = categories.filter(title=category) - + data = dict(categories=[], feeds={}) feed_ids = set() for category in categories: category_output = { - 'title': category.title, - 'description': category.description, - 'feed_ids': category.feed_ids, + "title": category.title, + "description": category.description, + "feed_ids": category.feed_ids, } - data['categories'].append(category_output) + data["categories"].append(category_output) feed_ids.update(list(category.feed_ids)) - + feeds = Feed.objects.filter(pk__in=feed_ids) for feed in feeds: - data['feeds'][feed.pk] = feed.canonical() - + data["feeds"][feed.pk] = feed.canonical() + return data @classmethod @@ -68,8 +69,10 @@ def reload_sites(cls, category_title=None): category_sites = MCategorySite.objects.all() if category_title: category_sites = category_sites.filter(category_title=category_title) - - category_groups = groupby(sorted(category_sites, key=lambda c: c.category_title), key=lambda c: c.category_title) + + category_groups = groupby( + sorted(category_sites, key=lambda c: c.category_title), key=lambda c: c.category_title + ) for category_title, sites in category_groups: try: category = cls.objects.get(title=category_title) @@ -79,27 +82,26 @@ def reload_sites(cls, category_title=None): category.feed_ids = [site.feed_id for site in sites] category.save() print(" ---> Reloaded category: %s" % category) - + @classmethod def subscribe(cls, user_id, category_title): category = cls.objects.get(title=category_title) for feed_id in category.feed_ids: us, _ = UserSubscription.objects.get_or_create( - feed_id=feed_id, + feed_id=feed_id, user_id=user_id, defaults={ - 'needs_unread_recalc': True, - 'active': True, - } + "needs_unread_recalc": True, + "active": True, + }, ) - + usf, created = UserSubscriptionFolders.objects.get_or_create( - user_id=user_id, - defaults={'folders': '[]'} + user_id=user_id, defaults={"folders": "[]"} ) - - usf.add_folder('', category.title) + + usf.add_folder("", category.title) folders = json.decode(usf.folders) for feed_id in category.feed_ids: feed = Feed.get_by_id(feed_id) @@ -108,27 +110,26 @@ def subscribe(cls, user_id, category_title): folders = add_object_to_folder(feed.pk, category.title, folders) usf.folders = json.encode(folders) usf.save() - - + + class MCategorySite(mongo.Document): feed_id = mongo.IntField() category_title = mongo.StringField() - + meta = { - 'collection': 'category_site', - 'indexes': ['feed_id', 'category_title'], - 'allow_inheritance': False, + "collection": "category_site", + "indexes": ["feed_id", "category_title"], + "allow_inheritance": False, } - + def __str__(self): feed = Feed.get_by_id(self.feed_id) return "%s: %s" % (self.category_title, feed) - + @classmethod def add(cls, category_title, feed_id): - category_site, created = cls.objects.get_or_create(category_title=category_title, - feed_id=feed_id) - + category_site, created = cls.objects.get_or_create(category_title=category_title, feed_id=feed_id) + if not created: print(" ---> Site is already in category: %s" % category_site) else: diff --git a/apps/categories/urls.py b/apps/categories/urls.py index dda7b05ece..18ac85b278 
100644 --- a/apps/categories/urls.py +++ b/apps/categories/urls.py @@ -2,6 +2,6 @@ from apps.categories import views urlpatterns = [ - url(r'^$', views.all_categories, name='all-categories'), - url(r'^subscribe/?$', views.subscribe, name='categories-subscribe'), + url(r"^$", views.all_categories, name="all-categories"), + url(r"^subscribe/?$", views.subscribe, name="categories-subscribe"), ] diff --git a/apps/categories/views.py b/apps/categories/views.py index 3616c62142..b3d51b320d 100644 --- a/apps/categories/views.py +++ b/apps/categories/views.py @@ -3,35 +3,42 @@ from utils import json_functions as json from utils.user_functions import ajax_login_required + @json.json_view def all_categories(request): categories = MCategory.serialize() - + return categories - + + @ajax_login_required @json.json_view def subscribe(request): user = request.user categories = MCategory.serialize() - category_titles = [c['title'] for c in categories['categories']] - subscribe_category_titles = request.POST.getlist('category') or request.POST.getlist('category[]') - + category_titles = [c["title"] for c in categories["categories"]] + subscribe_category_titles = request.POST.getlist("category") or request.POST.getlist("category[]") + invalid_category_title = False for category_title in subscribe_category_titles: if category_title not in category_titles: invalid_category_title = True - + if not subscribe_category_titles or invalid_category_title: - message = "Choose one or more of these categories: %s" % ', '.join(category_titles) + message = "Choose one or more of these categories: %s" % ", ".join(category_titles) return dict(code=-1, message=message) - + for category_title in subscribe_category_titles: MCategory.subscribe(user.pk, category_title) - + usf = UserSubscriptionFolders.objects.get(user=user.pk) - - return dict(code=1, message="Subscribed to %s %s" % ( - len(subscribe_category_titles), - 'category' if len(subscribe_category_titles) == 1 else 'categories', - ), folders=json.decode(usf.folders)) \ No newline at end of file + + return dict( + code=1, + message="Subscribed to %s %s" + % ( + len(subscribe_category_titles), + "category" if len(subscribe_category_titles) == 1 else "categories", + ), + folders=json.decode(usf.folders), + ) diff --git a/apps/feed_import/migrations/0001_initial.py b/apps/feed_import/migrations/0001_initial.py index 900e91b21e..107f7a85d5 100644 --- a/apps/feed_import/migrations/0001_initial.py +++ b/apps/feed_import/migrations/0001_initial.py @@ -7,7 +7,6 @@ class Migration(migrations.Migration): - initial = True dependencies = [ @@ -16,19 +15,30 @@ class Migration(migrations.Migration): operations = [ migrations.CreateModel( - name='OAuthToken', + name="OAuthToken", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('session_id', models.CharField(blank=True, max_length=50, null=True)), - ('uuid', models.CharField(blank=True, max_length=50, null=True)), - ('remote_ip', models.CharField(blank=True, max_length=50, null=True)), - ('request_token', models.CharField(max_length=50)), - ('request_token_secret', models.CharField(max_length=50)), - ('access_token', models.CharField(max_length=50)), - ('access_token_secret', models.CharField(max_length=50)), - ('credential', models.TextField(blank=True, null=True)), - ('created_date', models.DateTimeField(default=datetime.datetime.now)), - ('user', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ( + 
"id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("session_id", models.CharField(blank=True, max_length=50, null=True)), + ("uuid", models.CharField(blank=True, max_length=50, null=True)), + ("remote_ip", models.CharField(blank=True, max_length=50, null=True)), + ("request_token", models.CharField(max_length=50)), + ("request_token_secret", models.CharField(max_length=50)), + ("access_token", models.CharField(max_length=50)), + ("access_token_secret", models.CharField(max_length=50)), + ("credential", models.TextField(blank=True, null=True)), + ("created_date", models.DateTimeField(default=datetime.datetime.now)), + ( + "user", + models.OneToOneField( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to=settings.AUTH_USER_MODEL, + ), + ), ], ), ] diff --git a/apps/feed_import/models.py b/apps/feed_import/models.py index 6371a1ab85..ad1e15fcac 100644 --- a/apps/feed_import/models.py +++ b/apps/feed_import/models.py @@ -28,75 +28,73 @@ class OAuthToken(models.Model): access_token_secret = models.CharField(max_length=50) credential = models.TextField(null=True, blank=True) created_date = models.DateTimeField(default=datetime.datetime.now) - -class Importer: +class Importer: def clear_feeds(self): UserSubscription.objects.filter(user=self.user).delete() def clear_folders(self): UserSubscriptionFolders.objects.filter(user=self.user).delete() - + def get_folders(self): - self.usf, _ = UserSubscriptionFolders.objects.get_or_create(user=self.user, - defaults={'folders': '[]'}) + self.usf, _ = UserSubscriptionFolders.objects.get_or_create( + user=self.user, defaults={"folders": "[]"} + ) return json.decode(self.usf.folders) - + class OPMLExporter(Importer): - def __init__(self, user): self.user = user self.fetch_feeds() - + def process(self, verbose=False): now = str(datetime.datetime.now()) - root = Element('opml') - root.set('version', '1.1') - root.append(Comment('Generated by NewsBlur - newsblur.com')) - - head = SubElement(root, 'head') - title = SubElement(head, 'title') - title.text = 'NewsBlur Feeds' - dc = SubElement(head, 'dateCreated') - dc.text = now - dm = SubElement(head, 'dateModified') - dm.text = now - folders = self.get_folders() - body = SubElement(root, 'body') + root = Element("opml") + root.set("version", "1.1") + root.append(Comment("Generated by NewsBlur - newsblur.com")) + + head = SubElement(root, "head") + title = SubElement(head, "title") + title.text = "NewsBlur Feeds" + dc = SubElement(head, "dateCreated") + dc.text = now + dm = SubElement(head, "dateModified") + dm.text = now + folders = self.get_folders() + body = SubElement(root, "body") self.process_outline(body, folders, verbose=verbose) - return tostring(root, encoding='utf8', method='xml') - + return tostring(root, encoding="utf8", method="xml") + def process_outline(self, body, folders, verbose=False): for obj in folders: if isinstance(obj, int) and obj in self.feeds: feed = self.feeds[obj] if verbose: - print(" ---> Adding feed: %s - %s" % (feed['id'], - feed['feed_title'][:30])) + print(" ---> Adding feed: %s - %s" % (feed["id"], feed["feed_title"][:30])) feed_attrs = self.make_feed_row(feed) - body.append(Element('outline', feed_attrs)) + body.append(Element("outline", feed_attrs)) elif isinstance(obj, dict): for folder_title, folder_objs in list(obj.items()): if verbose: print(" ---> Adding folder: %s" % folder_title) - folder_element = Element('outline', {'text': folder_title, 'title': folder_title}) + folder_element = 
Element("outline", {"text": folder_title, "title": folder_title}) body.append(self.process_outline(folder_element, folder_objs, verbose=verbose)) return body - + def make_feed_row(self, feed): feed_attrs = { - 'text': feed['feed_title'], - 'title': feed['feed_title'], - 'type': 'rss', - 'version': 'RSS', - 'htmlUrl': feed['feed_link'] or "", - 'xmlUrl': feed['feed_address'] or "", + "text": feed["feed_title"], + "title": feed["feed_title"], + "type": "rss", + "version": "RSS", + "htmlUrl": feed["feed_link"] or "", + "xmlUrl": feed["feed_address"] or "", } return feed_attrs - + def fetch_feeds(self): subs = UserSubscription.objects.filter(user=self.user) self.feeds = [] @@ -113,16 +111,15 @@ def feed_count(self): class OPMLImporter(Importer): - def __init__(self, opml_xml, user): self.user = user self.opml_xml = opml_xml - + @timelimit(10) def try_processing(self): folders = self.process() return folders - + def process(self): # self.clear_feeds() @@ -136,38 +133,37 @@ def process(self): # self.clear_folders() self.usf.folders = json.encode(folders) self.usf.save() - + return folders - - def process_outline(self, outline, folders, in_folder=''): + + def process_outline(self, outline, folders, in_folder=""): for item in outline: - if (not hasattr(item, 'xmlUrl') and - (hasattr(item, 'text') or hasattr(item, 'title'))): + if not hasattr(item, "xmlUrl") and (hasattr(item, "text") or hasattr(item, "title")): folder = item - title = getattr(item, 'text', None) or getattr(item, 'title', None) + title = getattr(item, "text", None) or getattr(item, "title", None) # if hasattr(folder, 'text'): # logging.info(' ---> [%s] ~FRNew Folder: %s' % (self.user, folder.text)) obj = {title: []} folders = add_object_to_folder(obj, in_folder, folders) folders = self.process_outline(folder, folders, title) - elif hasattr(item, 'xmlUrl'): + elif hasattr(item, "xmlUrl"): feed = item - if not hasattr(feed, 'htmlUrl'): - setattr(feed, 'htmlUrl', None) + if not hasattr(feed, "htmlUrl"): + setattr(feed, "htmlUrl", None) # If feed title matches what's in the DB, don't override it on subscription. 
- feed_title = getattr(feed, 'title', None) or getattr(feed, 'text', None) + feed_title = getattr(feed, "title", None) or getattr(feed, "text", None) if not feed_title: - setattr(feed, 'title', feed.htmlUrl or feed.xmlUrl) + setattr(feed, "title", feed.htmlUrl or feed.xmlUrl) user_feed_title = None else: - setattr(feed, 'title', feed_title) + setattr(feed, "title", feed_title) user_feed_title = feed.title feed_address = urlnorm.normalize(feed.xmlUrl) feed_link = urlnorm.normalize(feed.htmlUrl) - if len(feed_address) > Feed._meta.get_field('feed_address').max_length: + if len(feed_address) > Feed._meta.get_field("feed_address").max_length: continue - if feed_link and len(feed_link) > Feed._meta.get_field('feed_link').max_length: + if feed_link and len(feed_link) > Feed._meta.get_field("feed_link").max_length: continue # logging.info(' ---> \t~FR%s - %s - %s' % (feed.title, feed_link, feed_address,)) feed_data = dict(feed_address=feed_address, feed_link=feed_link, feed_title=feed.title) @@ -178,32 +174,31 @@ def process_outline(self, outline, folders, in_folder=''): if duplicate_feed: feed_db = duplicate_feed[0].feed else: - feed_data['active_subscribers'] = 1 - feed_data['num_subscribers'] = 1 - feed_db, _ = Feed.find_or_create(feed_address=feed_address, - feed_link=feed_link, - defaults=dict(**feed_data)) + feed_data["active_subscribers"] = 1 + feed_data["num_subscribers"] = 1 + feed_db, _ = Feed.find_or_create( + feed_address=feed_address, feed_link=feed_link, defaults=dict(**feed_data) + ) if user_feed_title == feed_db.feed_title: user_feed_title = None - + try: - us = UserSubscription.objects.get( - feed=feed_db, - user=self.user) + us = UserSubscription.objects.get(feed=feed_db, user=self.user) except UserSubscription.DoesNotExist: us = None - + if not us: us = UserSubscription( - feed=feed_db, + feed=feed_db, user=self.user, needs_unread_recalc=True, mark_read_date=datetime.datetime.utcnow() - datetime.timedelta(days=1), active=self.user.profile.is_premium, - user_title=user_feed_title) + user_title=user_feed_title, + ) us.save() - + if self.user.profile.is_premium and not us.active: us.active = True us.save() @@ -214,25 +209,25 @@ def process_outline(self, outline, folders, in_folder=''): folders = add_object_to_folder(feed_db.pk, in_folder, folders) return folders - + def count_feeds_in_opml(self): opml_count = len(opml.from_string(self.opml_xml)) sub_count = UserSubscription.objects.filter(user=self.user).count() return max(sub_count, opml_count) - + class UploadedOPML(mongo.Document): user_id = mongo.IntField() opml_file = mongo.StringField() upload_date = mongo.DateTimeField(default=datetime.datetime.now) - + def __str__(self): user = User.objects.get(pk=self.user_id) return "%s: %s characters" % (user.username, len(self.opml_file)) - + meta = { - 'collection': 'uploaded_opml', - 'allow_inheritance': False, - 'order': '-upload_date', - 'indexes': ['user_id', '-upload_date'], + "collection": "uploaded_opml", + "allow_inheritance": False, + "order": "-upload_date", + "indexes": ["user_id", "-upload_date"], } diff --git a/apps/feed_import/tasks.py b/apps/feed_import/tasks.py index 8f9d7e67fd..7e70f5fd20 100644 --- a/apps/feed_import/tasks.py +++ b/apps/feed_import/tasks.py @@ -12,14 +12,14 @@ def ProcessOPML(user_id): logging.user(user, "~FR~SBOPML upload (task) starting...") opml = UploadedOPML.objects.filter(user_id=user_id).first() - opml_importer = OPMLImporter(opml.opml_file.encode('utf-8'), user) + opml_importer = OPMLImporter(opml.opml_file.encode("utf-8"), user) 
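    # process() runs the full OPML parse with no time cap; the web upload path
    # instead calls try_processing(), whose @timelimit(10) guard hands long
    # imports off to this background task via ProcessOPML.delay() (see
    # opml_upload below).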
opml_importer.process() - + feed_count = UserSubscription.objects.filter(user=user).count() user.profile.send_upload_opml_finished_email(feed_count) logging.user(user, "~FR~SBOPML upload (task): ~SK%s~SN~SB~FR feeds" % (feed_count)) MActivity.new_opml_import(user_id=user.pk, count=feed_count) - + UserSubscription.queue_new_feeds(user) UserSubscription.refresh_stale_feeds(user, exclude_new=True) diff --git a/apps/feed_import/test_feed_import.py b/apps/feed_import/test_feed_import.py index b739eb1570..57e429d9ab 100644 --- a/apps/feed_import/test_feed_import.py +++ b/apps/feed_import/test_feed_import.py @@ -8,62 +8,95 @@ from utils import json_functions as json_functions import json from django.core.management import call_command + + class Test_Import(TestCase): - fixtures = [ - 'apps/rss_feeds/fixtures/initial_data.json', - 'opml_import.json' - ] - + fixtures = ["apps/rss_feeds/fixtures/initial_data.json", "opml_import.json"] + def setUp(self): self.client = Client() - + def test_opml_import(self): - self.client.login(username='conesus', password='test') - user = User.objects.get(username='conesus') - + self.client.login(username="conesus", password="test") + user = User.objects.get(username="conesus") + # Verify user has no feeds subs = UserSubscription.objects.filter(user=user) self.assertEqual(subs.count(), 0) - - f = open(os.path.join(os.path.dirname(__file__), 'fixtures/opml.xml')) - response = self.client.post(reverse('opml-upload'), {'file': f}) + + f = open(os.path.join(os.path.dirname(__file__), "fixtures/opml.xml")) + response = self.client.post(reverse("opml-upload"), {"file": f}) self.assertEqual(response.status_code, 200) - + # Verify user now has feeds subs = UserSubscription.objects.filter(user=user) self.assertEqual(subs.count(), 54) - + usf = UserSubscriptionFolders.objects.get(user=user) print(json_functions.decode(usf.folders)) - self.assertEqual(json_functions.decode(usf.folders), [{'Tech': [4, 5, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28]}, 1, 2, 3, 6, {'New York': [1, 2, 3, 4, 5, 6, 7, 8, 9]}, {'tech': []}, {'Blogs': [29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, {'The Bloglets': [45, 46, 47, 48, 49]}]}, {'Cooking': [50, 51, 52, 53]}, 54]) - + self.assertEqual( + json_functions.decode(usf.folders), + [ + {"Tech": [4, 5, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28]}, + 1, + 2, + 3, + 6, + {"New York": [1, 2, 3, 4, 5, 6, 7, 8, 9]}, + {"tech": []}, + { + "Blogs": [ + 29, + 30, + 31, + 32, + 33, + 34, + 35, + 36, + 37, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + {"The Bloglets": [45, 46, 47, 48, 49]}, + ] + }, + {"Cooking": [50, 51, 52, 53]}, + 54, + ], + ) + def test_opml_import__empty(self): - self.client.login(username='conesus', password='test') - user = User.objects.get(username='conesus') - + self.client.login(username="conesus", password="test") + user = User.objects.get(username="conesus") + # Verify user has default feeds subs = UserSubscription.objects.filter(user=user) self.assertEqual(subs.count(), 0) - response = self.client.post(reverse('opml-upload')) + response = self.client.post(reverse("opml-upload")) self.assertEqual(response.status_code, 200) - + # Verify user now has feeds subs = UserSubscription.objects.filter(user=user) self.assertEquals(subs.count(), 0) + class Test_Duplicate_Feeds(TestCase): fixtures = [ - 'apps/rss_feeds/fixtures/initial_data.json', + "apps/rss_feeds/fixtures/initial_data.json", ] - def test_duplicate_feeds(self): # had to load the feed data this way 
to hit the save() override.
        # it wouldn't work with loaddata or fixtures
-        with open('apps/feed_import/fixtures/duplicate_feeds.json') as json_file:
+        with open("apps/feed_import/fixtures/duplicate_feeds.json") as json_file:
             feed_data = json.loads(json_file.read())
             feed_data_1 = feed_data[0]
             feed_data_2 = feed_data[1]
@@ -72,15 +105,15 @@ def test_duplicate_feeds(self):
         feed_1.save()
         feed_2.save()

-        call_command('loaddata', 'apps/feed_import/fixtures/subscriptions.json')
+        call_command("loaddata", "apps/feed_import/fixtures/subscriptions.json")

-        user_1_feed_subscription = UserSubscription.objects.filter(user__id=1)[0].feed_id
+        user_1_feed_subscription = UserSubscription.objects.filter(user__id=1)[0].feed_id
         user_2_feed_subscription = UserSubscription.objects.filter(user__id=2)[0].feed_id

         self.assertNotEqual(user_1_feed_subscription, user_2_feed_subscription)

         original_feed_id = merge_feeds(user_1_feed_subscription, user_2_feed_subscription)
-
-        user_1_feed_subscription = UserSubscription.objects.filter(user__id=1)[0].feed_id
+
+        user_1_feed_subscription = UserSubscription.objects.filter(user__id=1)[0].feed_id
         user_2_feed_subscription = UserSubscription.objects.filter(user__id=2)[0].feed_id

         self.assertEqual(user_1_feed_subscription, user_2_feed_subscription)
diff --git a/apps/feed_import/urls.py b/apps/feed_import/urls.py
index 1fe34d44a3..feb2ace7fd 100644
--- a/apps/feed_import/urls.py
+++ b/apps/feed_import/urls.py
@@ -2,6 +2,6 @@
 from apps.feed_import import views

 urlpatterns = [
-    url(r'^opml_upload/?$', views.opml_upload, name='opml-upload'),
-    url(r'^opml_export/?$', views.opml_export, name='opml-export'),
+    url(r"^opml_upload/?$", views.opml_upload, name="opml-upload"),
+    url(r"^opml_export/?$", views.opml_export, name="opml-export"),
 ]
diff --git a/apps/feed_import/views.py b/apps/feed_import/views.py
index 9eaf98df58..b6d28882bb 100644
--- a/apps/feed_import/views.py
+++ b/apps/feed_import/views.py
@@ -7,6 +7,7 @@ import uuid
 from django.contrib.sites.models import Site
 from django.contrib.auth.models import User
+
 # from django.db import IntegrityError
 from django.http import HttpResponse, HttpResponseRedirect
 from django.conf import settings
@@ -29,11 +30,11 @@ def opml_upload(request):
     message = "OK"
     code = 1
     payload = {}
-    
-    if request.method == 'POST':
-        if 'file' in request.FILES:
+
+    if request.method == "POST":
+        if "file" in request.FILES:
             logging.user(request, "~FR~SBOPML upload starting...")
-            file = request.FILES['file']
+            file = request.FILES["file"]
             xml_opml = file.read()
             try:
                 UploadedOPML.objects.create(user_id=request.user.pk, opml_file=xml_opml)
@@ -41,7 +42,7 @@
                 folders = None
                 code = -1
                 message = "There was a Unicode decode error when reading your OPML file. Ensure it's a text file with a .opml or .xml extension. Is it a zip file?"
-            
+
             opml_importer = OPMLImporter(xml_opml, request.user)
             try:
                 folders = opml_importer.try_processing()
@@ -49,7 +50,9 @@
                 folders = None
                 ProcessOPML.delay(request.user.pk)
                 feed_count = opml_importer.count_feeds_in_opml()
-                logging.user(request, "~FR~SBOPML upload took too long, found %s feeds. Tasking..." % feed_count)
+                logging.user(
+                    request, "~FR~SBOPML upload took too long, found %s feeds. Tasking..."
% feed_count + ) payload = dict(folders=folders, delayed=True, feed_count=feed_count) code = 2 message = "" @@ -64,32 +67,35 @@ def opml_upload(request): payload = dict(folders=folders, feeds=feeds) logging.user(request, "~FR~SBOPML Upload: ~SK%s~SN~SB~FR feeds" % (len(feeds))) from apps.social.models import MActivity + MActivity.new_opml_import(user_id=request.user.pk, count=len(feeds)) UserSubscription.queue_new_feeds(request.user) UserSubscription.refresh_stale_feeds(request.user, exclude_new=True) else: message = "Attach an .opml file." code = -1 - - return HttpResponse(json.encode(dict(message=message, code=code, payload=payload)), - content_type='text/html') + + return HttpResponse( + json.encode(dict(message=message, code=code, payload=payload)), content_type="text/html" + ) + def opml_export(request): - user = get_user(request) - now = datetime.datetime.now() - if request.GET.get('user_id') and user.is_staff: - user = User.objects.get(pk=request.GET['user_id']) + user = get_user(request) + now = datetime.datetime.now() + if request.GET.get("user_id") and user.is_staff: + user = User.objects.get(pk=request.GET["user_id"]) exporter = OPMLExporter(user) - opml = exporter.process() + opml = exporter.process() from apps.social.models import MActivity + MActivity.new_opml_export(user_id=user.pk, count=exporter.feed_count) - response = HttpResponse(opml, content_type='text/xml; charset=utf-8') - response['Content-Disposition'] = 'attachment; filename=NewsBlur-%s-%s.opml' % ( + response = HttpResponse(opml, content_type="text/xml; charset=utf-8") + response["Content-Disposition"] = "attachment; filename=NewsBlur-%s-%s.opml" % ( user.username, - now.strftime('%Y-%m-%d') + now.strftime("%Y-%m-%d"), ) - - return response + return response diff --git a/apps/mobile/tests.py b/apps/mobile/tests.py index 2247054b35..3748f41ba4 100644 --- a/apps/mobile/tests.py +++ b/apps/mobile/tests.py @@ -7,6 +7,7 @@ from django.test import TestCase + class SimpleTest(TestCase): def test_basic_addition(self): """ @@ -14,10 +15,12 @@ def test_basic_addition(self): """ self.failUnlessEqual(1 + 1, 2) -__test__ = {"doctest": """ + +__test__ = { + "doctest": """ Another way to test that 1 + 1 is equal to 2. 
>>> 1 + 1 == 2 True -"""} - +""" +} diff --git a/apps/mobile/urls.py b/apps/mobile/urls.py index 01e7b15e6f..5d66dda536 100644 --- a/apps/mobile/urls.py +++ b/apps/mobile/urls.py @@ -2,5 +2,5 @@ from apps.mobile import views urlpatterns = [ - url(r'^$', views.index, name='mobile-index'), + url(r"^$", views.index, name="mobile-index"), ] diff --git a/apps/mobile/views.py b/apps/mobile/views.py index f9e765f522..95333ae27b 100644 --- a/apps/mobile/views.py +++ b/apps/mobile/views.py @@ -8,5 +8,6 @@ from utils import json_functions as json from utils import log as logging + def index(request): - return render(request, 'mobile/mobile_workspace.xhtml', {}) + return render(request, "mobile/mobile_workspace.xhtml", {}) diff --git a/apps/monitor/urls.py b/apps/monitor/urls.py index 2ae4c0fdc9..4576c9b0ae 100644 --- a/apps/monitor/urls.py +++ b/apps/monitor/urls.py @@ -1,24 +1,38 @@ from django.conf.urls import url -from apps.monitor.views import ( AppServers, AppTimes, -Classifiers, DbTimes, Errors, FeedCounts, Feeds, LoadTimes, - Stories, TasksCodes, TasksPipeline, TasksServers, TasksTimes, - Updates, Users, FeedSizes +from apps.monitor.views import ( + AppServers, + AppTimes, + Classifiers, + DbTimes, + Errors, + FeedCounts, + Feeds, + LoadTimes, + Stories, + TasksCodes, + TasksPipeline, + TasksServers, + TasksTimes, + Updates, + Users, + FeedSizes, ) + urlpatterns = [ - url(r'^app-servers?$', AppServers.as_view(), name="app_servers"), - url(r'^app-times?$', AppTimes.as_view(), name="app_times"), - url(r'^classifiers?$', Classifiers.as_view(), name="classifiers"), - url(r'^db-times?$', DbTimes.as_view(), name="db_times"), - url(r'^errors?$', Errors.as_view(), name="errors"), - url(r'^feed-counts?$', FeedCounts.as_view(), name="feed_counts"), - url(r'^feed-sizes?$', FeedSizes.as_view(), name="feed_sizes"), - url(r'^feeds?$', Feeds.as_view(), name="feeds"), - url(r'^load-times?$', LoadTimes.as_view(), name="load_times"), - url(r'^stories?$', Stories.as_view(), name="stories"), - url(r'^task-codes?$', TasksCodes.as_view(), name="task_codes"), - url(r'^task-pipeline?$', TasksPipeline.as_view(), name="task_pipeline"), - url(r'^task-servers?$', TasksServers.as_view(), name="task_servers"), - url(r'^task-times?$', TasksTimes.as_view(), name="task_times"), - url(r'^updates?$', Updates.as_view(), name="updates"), - url(r'^users?$', Users.as_view(), name="users"), + url(r"^app-servers?$", AppServers.as_view(), name="app_servers"), + url(r"^app-times?$", AppTimes.as_view(), name="app_times"), + url(r"^classifiers?$", Classifiers.as_view(), name="classifiers"), + url(r"^db-times?$", DbTimes.as_view(), name="db_times"), + url(r"^errors?$", Errors.as_view(), name="errors"), + url(r"^feed-counts?$", FeedCounts.as_view(), name="feed_counts"), + url(r"^feed-sizes?$", FeedSizes.as_view(), name="feed_sizes"), + url(r"^feeds?$", Feeds.as_view(), name="feeds"), + url(r"^load-times?$", LoadTimes.as_view(), name="load_times"), + url(r"^stories?$", Stories.as_view(), name="stories"), + url(r"^task-codes?$", TasksCodes.as_view(), name="task_codes"), + url(r"^task-pipeline?$", TasksPipeline.as_view(), name="task_pipeline"), + url(r"^task-servers?$", TasksServers.as_view(), name="task_servers"), + url(r"^task-times?$", TasksTimes.as_view(), name="task_times"), + url(r"^updates?$", Updates.as_view(), name="updates"), + url(r"^users?$", Users.as_view(), name="users"), ] diff --git a/apps/monitor/views/newsblur_app_servers.py b/apps/monitor/views/newsblur_app_servers.py index aeae5286d3..59f3e59847 100755 --- 
a/apps/monitor/views/newsblur_app_servers.py +++ b/apps/monitor/views/newsblur_app_servers.py @@ -3,11 +3,11 @@ from django.views import View from django.shortcuts import render -class AppServers(View): +class AppServers(View): def get(self, request): - data = dict((("%s" % s['_id'].replace('-', ''), s['feeds']) for s in self.stats)) - #total = self.total: + data = dict((("%s" % s["_id"].replace("-", ""), s["feeds"]) for s in self.stats)) + # total = self.total: # if total: # data['total'] = total[0]['feeds'] chart_name = "app_servers" @@ -21,38 +21,48 @@ def get(self, request): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") - + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") + @property def stats(self): - stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate([{ - "$match": { - "date": { - "$gte": datetime.datetime.now() - datetime.timedelta(minutes=5), + stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate( + [ + { + "$match": { + "date": { + "$gte": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, + }, + { + "$group": { + "_id": "$server", + "feeds": {"$sum": 1}, + }, }, - }, - }, { - "$group": { - "_id" : "$server", - "feeds" : {"$sum": 1}, - }, - }]) - + ] + ) + return list(stats) - + @property - def total(self): - stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate([{ - "$match": { - "date": { - "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + def total(self): + stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate( + [ + { + "$match": { + "date": { + "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, }, - }, - }, { - "$group": { - "_id" : 1, - "feeds" : {"$sum": 1}, - }, - }]) - + { + "$group": { + "_id": 1, + "feeds": {"$sum": 1}, + }, + }, + ] + ) + return list(stats) diff --git a/apps/monitor/views/newsblur_app_times.py b/apps/monitor/views/newsblur_app_times.py index d64b0bc158..884058f0f9 100755 --- a/apps/monitor/views/newsblur_app_times.py +++ b/apps/monitor/views/newsblur_app_times.py @@ -3,10 +3,10 @@ import datetime from django.conf import settings -class AppTimes(View): +class AppTimes(View): def get(self, request): - servers = dict((("%s" % s['_id'], s['page_load']) for s in self.stats)) + servers = dict((("%s" % s["_id"], s["page_load"]) for s in self.stats)) data = servers chart_name = "app_times" chart_type = "counter" @@ -20,21 +20,26 @@ def get(self, request): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") - + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") + @property def stats(self): - stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate([{ - "$match": { - "date": { - "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate( + [ + { + "$match": { + "date": { + "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, }, - }, - }, { - "$group": { - "_id" : "$server", - "page_load" : {"$avg": "$page_load"}, - }, - }]) - + { + "$group": { + "_id": "$server", + "page_load": {"$avg": "$page_load"}, + }, + }, + ] + ) + return list(stats) diff --git a/apps/monitor/views/newsblur_classifiers.py b/apps/monitor/views/newsblur_classifiers.py index bc7af32abe..752fded2d5 100755 --- 
a/apps/monitor/views/newsblur_classifiers.py +++ b/apps/monitor/views/newsblur_classifiers.py @@ -4,13 +4,12 @@ class Classifiers(View): - def get(self, request): data = { - 'feeds': MClassifierFeed.objects._collection.count(), - 'authors': MClassifierAuthor.objects._collection.count(), - 'tags': MClassifierTag.objects._collection.count(), - 'titles': MClassifierTitle.objects._collection.count(), + "feeds": MClassifierFeed.objects._collection.count(), + "authors": MClassifierAuthor.objects._collection.count(), + "tags": MClassifierTag.objects._collection.count(), + "titles": MClassifierTitle.objects._collection.count(), } chart_name = "classifiers" @@ -24,5 +23,4 @@ def get(self, request): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") - + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") diff --git a/apps/monitor/views/newsblur_dbtimes.py b/apps/monitor/views/newsblur_dbtimes.py index d11daf595e..d23859edd1 100755 --- a/apps/monitor/views/newsblur_dbtimes.py +++ b/apps/monitor/views/newsblur_dbtimes.py @@ -3,24 +3,22 @@ from apps.statistics.models import MStatistics -class DbTimes(View): - +class DbTimes(View): def get(self, request): - data = { - 'sql_avg': MStatistics.get('latest_sql_avg'), - 'mongo_avg': MStatistics.get('latest_mongo_avg'), - 'redis_user_avg': MStatistics.get('latest_redis_user_avg'), - 'redis_story_avg': MStatistics.get('latest_redis_story_avg'), - 'redis_session_avg': MStatistics.get('latest_redis_session_avg'), - 'redis_pubsub_avg': MStatistics.get('latest_redis_pubsub_avg'), - 'task_sql_avg': MStatistics.get('latest_task_sql_avg'), - 'task_mongo_avg': MStatistics.get('latest_task_mongo_avg'), - 'task_redis_user_avg': MStatistics.get('latest_task_redis_user_avg'), - 'task_redis_story_avg': MStatistics.get('latest_task_redis_story_avg'), - 'task_redis_session_avg': MStatistics.get('latest_task_redis_session_avg'), - 'task_redis_pubsub_avg': MStatistics.get('latest_task_redis_pubsub_avg'), + "sql_avg": MStatistics.get("latest_sql_avg"), + "mongo_avg": MStatistics.get("latest_mongo_avg"), + "redis_user_avg": MStatistics.get("latest_redis_user_avg"), + "redis_story_avg": MStatistics.get("latest_redis_story_avg"), + "redis_session_avg": MStatistics.get("latest_redis_session_avg"), + "redis_pubsub_avg": MStatistics.get("latest_redis_pubsub_avg"), + "task_sql_avg": MStatistics.get("latest_task_sql_avg"), + "task_mongo_avg": MStatistics.get("latest_task_mongo_avg"), + "task_redis_user_avg": MStatistics.get("latest_task_redis_user_avg"), + "task_redis_story_avg": MStatistics.get("latest_task_redis_story_avg"), + "task_redis_session_avg": MStatistics.get("latest_task_redis_session_avg"), + "task_redis_pubsub_avg": MStatistics.get("latest_task_redis_pubsub_avg"), } chart_name = "db_times" chart_type = "counter" @@ -32,4 +30,4 @@ def get(self, request): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") diff --git a/apps/monitor/views/newsblur_errors.py b/apps/monitor/views/newsblur_errors.py index ea057efe85..7497efd4f7 100755 --- a/apps/monitor/views/newsblur_errors.py +++ b/apps/monitor/views/newsblur_errors.py @@ -3,23 +3,22 @@ from apps.statistics.models import MStatistics -class Errors(View): +class Errors(View): def get(self, request): statistics 
= MStatistics.all() data = { - 'feed_success': statistics['feeds_fetched'], + "feed_success": statistics["feeds_fetched"], } chart_name = "errors" chart_type = "counter" formatted_data = {} for k, v in data.items(): - formatted_data[k] = f'feed_success {v}' - + formatted_data[k] = f"feed_success {v}" + context = { "data": formatted_data, "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") - + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") diff --git a/apps/monitor/views/newsblur_feed_counts.py b/apps/monitor/views/newsblur_feed_counts.py index 7a08c954c8..dbc355851f 100755 --- a/apps/monitor/views/newsblur_feed_counts.py +++ b/apps/monitor/views/newsblur_feed_counts.py @@ -6,44 +6,43 @@ from apps.push.models import PushSubscription from apps.statistics.models import MStatistics -class FeedCounts(View): +class FeedCounts(View): def get(self, request): - - exception_feeds = MStatistics.get('munin:exception_feeds') + exception_feeds = MStatistics.get("munin:exception_feeds") if not exception_feeds: exception_feeds = Feed.objects.filter(has_feed_exception=True).count() - MStatistics.set('munin:exception_feeds', exception_feeds, 60*60*12) + MStatistics.set("munin:exception_feeds", exception_feeds, 60 * 60 * 12) - exception_pages = MStatistics.get('munin:exception_pages') + exception_pages = MStatistics.get("munin:exception_pages") if not exception_pages: exception_pages = Feed.objects.filter(has_page_exception=True).count() - MStatistics.set('munin:exception_pages', exception_pages, 60*60*12) + MStatistics.set("munin:exception_pages", exception_pages, 60 * 60 * 12) - duplicate_feeds = MStatistics.get('munin:duplicate_feeds') + duplicate_feeds = MStatistics.get("munin:duplicate_feeds") if not duplicate_feeds: duplicate_feeds = DuplicateFeed.objects.count() - MStatistics.set('munin:duplicate_feeds', duplicate_feeds, 60*60*12) + MStatistics.set("munin:duplicate_feeds", duplicate_feeds, 60 * 60 * 12) - active_feeds = MStatistics.get('munin:active_feeds') + active_feeds = MStatistics.get("munin:active_feeds") if not active_feeds: active_feeds = Feed.objects.filter(active_subscribers__gt=0).count() - MStatistics.set('munin:active_feeds', active_feeds, 60*60*12) + MStatistics.set("munin:active_feeds", active_feeds, 60 * 60 * 12) - push_feeds = MStatistics.get('munin:push_feeds') + push_feeds = MStatistics.get("munin:push_feeds") if not push_feeds: push_feeds = PushSubscription.objects.filter(verified=True).count() - MStatistics.set('munin:push_feeds', push_feeds, 60*60*12) + MStatistics.set("munin:push_feeds", push_feeds, 60 * 60 * 12) r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) - + data = { - 'scheduled_feeds': r.zcard('scheduled_updates'), - 'exception_feeds': exception_feeds, - 'exception_pages': exception_pages, - 'duplicate_feeds': duplicate_feeds, - 'active_feeds': active_feeds, - 'push_feeds': push_feeds, + "scheduled_feeds": r.zcard("scheduled_updates"), + "exception_feeds": exception_feeds, + "exception_pages": exception_pages, + "duplicate_feeds": duplicate_feeds, + "active_feeds": active_feeds, + "push_feeds": push_feeds, } chart_name = "feed_counts" chart_type = "counter" @@ -57,6 +56,4 @@ def get(self, request): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") - - + return render(request, "monitor/prometheus_data.html", context, 
content_type="text/plain") diff --git a/apps/monitor/views/newsblur_feed_sizes.py b/apps/monitor/views/newsblur_feed_sizes.py index 553aee324b..c0da56eaeb 100644 --- a/apps/monitor/views/newsblur_feed_sizes.py +++ b/apps/monitor/views/newsblur_feed_sizes.py @@ -7,23 +7,24 @@ from apps.push.models import PushSubscription from apps.statistics.models import MStatistics -class FeedSizes(View): +class FeedSizes(View): def get(self, request): - - fs_size_bytes = MStatistics.get('munin:fs_size_bytes') + fs_size_bytes = MStatistics.get("munin:fs_size_bytes") if not fs_size_bytes: - fs_size_bytes = Feed.objects.aggregate(Sum('fs_size_bytes'))['fs_size_bytes__sum'] - MStatistics.set('munin:fs_size_bytes', fs_size_bytes, 60*60*12) + fs_size_bytes = Feed.objects.aggregate(Sum("fs_size_bytes"))["fs_size_bytes__sum"] + MStatistics.set("munin:fs_size_bytes", fs_size_bytes, 60 * 60 * 12) - archive_users_size_bytes = MStatistics.get('munin:archive_users_size_bytes') + archive_users_size_bytes = MStatistics.get("munin:archive_users_size_bytes") if not archive_users_size_bytes: - archive_users_size_bytes = Feed.objects.filter(archive_subscribers__gte=1).aggregate(Sum('fs_size_bytes'))['fs_size_bytes__sum'] - MStatistics.set('munin:archive_users_size_bytes', archive_users_size_bytes, 60*60*12) + archive_users_size_bytes = Feed.objects.filter(archive_subscribers__gte=1).aggregate( + Sum("fs_size_bytes") + )["fs_size_bytes__sum"] + MStatistics.set("munin:archive_users_size_bytes", archive_users_size_bytes, 60 * 60 * 12) data = { - 'fs_size_bytes': fs_size_bytes, - 'archive_users_size_bytes': archive_users_size_bytes, + "fs_size_bytes": fs_size_bytes, + "archive_users_size_bytes": archive_users_size_bytes, } chart_name = "feed_sizes" chart_type = "counter" @@ -37,6 +38,4 @@ def get(self, request): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") - - + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") diff --git a/apps/monitor/views/newsblur_feeds.py b/apps/monitor/views/newsblur_feeds.py index 9c527fa820..fcacada32f 100755 --- a/apps/monitor/views/newsblur_feeds.py +++ b/apps/monitor/views/newsblur_feeds.py @@ -6,37 +6,35 @@ from apps.social.models import MSocialProfile, MSocialSubscription from apps.statistics.models import MStatistics -class Feeds(View): +class Feeds(View): def get(self, request): - - feeds_count = MStatistics.get('munin:feeds_count') + feeds_count = MStatistics.get("munin:feeds_count") if not feeds_count: feeds_count = Feed.objects.all().count() - MStatistics.set('munin:feeds_count', feeds_count, 60*60*12) + MStatistics.set("munin:feeds_count", feeds_count, 60 * 60 * 12) - subscriptions_count = MStatistics.get('munin:subscriptions_count') + subscriptions_count = MStatistics.get("munin:subscriptions_count") if not subscriptions_count: subscriptions_count = UserSubscription.objects.all().count() - MStatistics.set('munin:subscriptions_count', subscriptions_count, 60*60*12) + MStatistics.set("munin:subscriptions_count", subscriptions_count, 60 * 60 * 12) data = { - 'feeds': feeds_count, - 'subscriptions': subscriptions_count, - 'profiles': MSocialProfile.objects._collection.count(), - 'social_subscriptions': MSocialSubscription.objects._collection.count(), + "feeds": feeds_count, + "subscriptions": subscriptions_count, + "profiles": MSocialProfile.objects._collection.count(), + "social_subscriptions": MSocialSubscription.objects._collection.count(), } 
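        # Each key/value pair above becomes one Prometheus exposition line via
        # the f-string loop below, e.g. (illustrative value only):
        #     feeds{category="subscriptions"} 1234567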
chart_name = "feeds" chart_type = "counter" formatted_data = {} for k, v in data.items(): formatted_data[k] = f'{chart_name}{{category="{k}"}} {v}' - + context = { "data": formatted_data, "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") - + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") diff --git a/apps/monitor/views/newsblur_loadtimes.py b/apps/monitor/views/newsblur_loadtimes.py index 64c93e1f60..40538f6df1 100755 --- a/apps/monitor/views/newsblur_loadtimes.py +++ b/apps/monitor/views/newsblur_loadtimes.py @@ -1,15 +1,15 @@ from django.shortcuts import render from django.views import View -class LoadTimes(View): +class LoadTimes(View): def get(self, request): from apps.statistics.models import MStatistics - + data = { - 'feed_loadtimes_1min': MStatistics.get('last_1_min_time_taken'), - 'feed_loadtimes_avg_hour': MStatistics.get('latest_avg_time_taken'), - 'feeds_loaded_hour': MStatistics.get('latest_sites_loaded'), + "feed_loadtimes_1min": MStatistics.get("last_1_min_time_taken"), + "feed_loadtimes_avg_hour": MStatistics.get("latest_avg_time_taken"), + "feeds_loaded_hour": MStatistics.get("latest_sites_loaded"), } chart_name = "load_times" chart_type = "counter" @@ -23,5 +23,4 @@ def get(self, request): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") - + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") diff --git a/apps/monitor/views/newsblur_stories.py b/apps/monitor/views/newsblur_stories.py index 3cbe3f0b97..f88a5164c0 100755 --- a/apps/monitor/views/newsblur_stories.py +++ b/apps/monitor/views/newsblur_stories.py @@ -2,13 +2,13 @@ from django.shortcuts import render from apps.rss_feeds.models import MStory, MStarredStory from apps.rss_feeds.models import MStory, MStarredStory - -class Stories(View): + +class Stories(View): def get(self, request): data = { - 'stories': MStory.objects._collection.count(), - 'starred_stories': MStarredStory.objects._collection.count(), + "stories": MStory.objects._collection.count(), + "starred_stories": MStarredStory.objects._collection.count(), } chart_name = "stories" chart_type = "counter" @@ -21,5 +21,4 @@ def get(self, request): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") - + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") diff --git a/apps/monitor/views/newsblur_tasks_codes.py b/apps/monitor/views/newsblur_tasks_codes.py index 652a136540..952a9afed1 100755 --- a/apps/monitor/views/newsblur_tasks_codes.py +++ b/apps/monitor/views/newsblur_tasks_codes.py @@ -3,10 +3,10 @@ from django.shortcuts import render from django.views import View -class TasksCodes(View): +class TasksCodes(View): def get(self, request): - data = dict((("_%s" % s['_id'], s['feeds']) for s in self.stats)) + data = dict((("_%s" % s["_id"], s["feeds"]) for s in self.stats)) chart_name = "task_codes" chart_type = "counter" formatted_data = {} @@ -18,22 +18,26 @@ def get(self, request): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") - + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") + @property - def stats(self): - 
stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate([{ - "$match": { - "date": { - "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + def stats(self): + stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate( + [ + { + "$match": { + "date": { + "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, }, - }, - }, { - "$group": { - "_id" : "$feed_code", - "feeds" : {"$sum": 1}, - }, - }]) - + { + "$group": { + "_id": "$feed_code", + "feeds": {"$sum": 1}, + }, + }, + ] + ) + return list(stats) - \ No newline at end of file diff --git a/apps/monitor/views/newsblur_tasks_pipeline.py b/apps/monitor/views/newsblur_tasks_pipeline.py index e962fb9ef4..33931f5ac9 100755 --- a/apps/monitor/views/newsblur_tasks_pipeline.py +++ b/apps/monitor/views/newsblur_tasks_pipeline.py @@ -4,10 +4,10 @@ from django.shortcuts import render from django.views import View -class TasksPipeline(View): +class TasksPipeline(View): def get(self, request): - data =self.stats + data = self.stats chart_name = "task_pipeline" chart_type = "counter" @@ -19,27 +19,31 @@ def get(self, request): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") - + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") + @property def stats(self): - - stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate([{ - "$match": { - "date": { - "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate( + [ + { + "$match": { + "date": { + "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, + }, + { + "$group": { + "_id": 1, + "feed_fetch": {"$avg": "$feed_fetch"}, + "feed_process": {"$avg": "$feed_process"}, + "page": {"$avg": "$page"}, + "icon": {"$avg": "$icon"}, + "total": {"$avg": "$total"}, + }, }, - }, - }, { - "$group": { - "_id": 1, - "feed_fetch": {"$avg": "$feed_fetch"}, - "feed_process": {"$avg": "$feed_process"}, - "page": {"$avg": "$page"}, - "icon": {"$avg": "$icon"}, - "total": {"$avg": "$total"}, - }, - }]) + ] + ) stats = list(stats) if stats: print(stats) diff --git a/apps/monitor/views/newsblur_tasks_servers.py b/apps/monitor/views/newsblur_tasks_servers.py index 90a26fcf37..c8bcb394f9 100755 --- a/apps/monitor/views/newsblur_tasks_servers.py +++ b/apps/monitor/views/newsblur_tasks_servers.py @@ -4,10 +4,10 @@ from django.shortcuts import render from django.views import View -class TasksServers(View): +class TasksServers(View): def get(self, request): - data = dict((("%s" % s['_id'].replace('-', ''), s['feeds']) for s in self.stats)) + data = dict((("%s" % s["_id"].replace("-", ""), s["feeds"]) for s in self.stats)) chart_name = "task_servers" chart_type = "counter" @@ -19,39 +19,48 @@ def get(self, request): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") - @property def stats(self): - stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate([{ - "$match": { - "date": { - "$gte": datetime.datetime.now() - datetime.timedelta(minutes=5), + stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate( + [ + { + "$match": { + "date": { + "$gte": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, }, - }, - }, { - 
"$group": { - "_id" : "$server", - "feeds" : {"$sum": 1}, - }, - }]) - + { + "$group": { + "_id": "$server", + "feeds": {"$sum": 1}, + }, + }, + ] + ) + return list(stats) - + @property def total(self): - stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate([{ - "$match": { - "date": { - "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate( + [ + { + "$match": { + "date": { + "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, + }, + { + "$group": { + "_id": 1, + "feeds": {"$sum": 1}, + }, }, - }, - }, { - "$group": { - "_id" : 1, - "feeds" : {"$sum": 1}, - }, - }]) - + ] + ) + return list(stats) diff --git a/apps/monitor/views/newsblur_tasks_times.py b/apps/monitor/views/newsblur_tasks_times.py index 0d6a14f9ca..2ba7aa21b8 100755 --- a/apps/monitor/views/newsblur_tasks_times.py +++ b/apps/monitor/views/newsblur_tasks_times.py @@ -4,10 +4,10 @@ from django.shortcuts import render from django.views import View -class TasksTimes(View): +class TasksTimes(View): def get(self, request): - data = dict((("%s" % s['_id'], s['total']) for s in self.stats)) + data = dict((("%s" % s["_id"], s["total"]) for s in self.stats)) chart_name = "task_times" chart_type = "counter" @@ -19,22 +19,26 @@ def get(self, request): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") - @property def stats(self): - stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate([{ - "$match": { - "date": { - "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate( + [ + { + "$match": { + "date": { + "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, }, - }, - }, { - "$group": { - "_id" : "$server", - "total" : {"$avg": "$total"}, - }, - }]) - + { + "$group": { + "_id": "$server", + "total": {"$avg": "$total"}, + }, + }, + ] + ) + return list(stats) diff --git a/apps/monitor/views/newsblur_updates.py b/apps/monitor/views/newsblur_updates.py index 38640407ff..2775e72ec4 100755 --- a/apps/monitor/views/newsblur_updates.py +++ b/apps/monitor/views/newsblur_updates.py @@ -4,26 +4,26 @@ from django.shortcuts import render from django.views import View -class Updates(View): - def get(self, request): +class Updates(View): + def get(self, request): r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) data = { - 'update_queue': r.scard("queued_feeds"), - 'feeds_fetched': r.zcard("fetched_feeds_last_hour"), - 'tasked_feeds': r.zcard("tasked_feeds"), - 'error_feeds': r.zcard("error_feeds"), - 'celery_update_feeds': r.llen("update_feeds"), - 'celery_new_feeds': r.llen("new_feeds"), - 'celery_push_feeds': r.llen("push_feeds"), - 'celery_work_queue': r.llen("work_queue"), - 'celery_search_queue': r.llen("search_indexer"), + "update_queue": r.scard("queued_feeds"), + "feeds_fetched": r.zcard("fetched_feeds_last_hour"), + "tasked_feeds": r.zcard("tasked_feeds"), + "error_feeds": r.zcard("error_feeds"), + "celery_update_feeds": r.llen("update_feeds"), + "celery_new_feeds": r.llen("new_feeds"), + "celery_push_feeds": r.llen("push_feeds"), + "celery_work_queue": r.llen("work_queue"), + "celery_search_queue": r.llen("search_indexer"), } chart_name = "updates" chart_type = "counter" formatted_data = {} - + for k, v in 
data.items(): formatted_data[k] = f'{chart_name}{{category="{k}"}} {v}' context = { @@ -31,5 +31,4 @@ def get(self, request): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") - + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") diff --git a/apps/monitor/views/newsblur_users.py b/apps/monitor/views/newsblur_users.py index 00218c8cde..76eed4d0f0 100755 --- a/apps/monitor/views/newsblur_users.py +++ b/apps/monitor/views/newsblur_users.py @@ -7,39 +7,63 @@ from apps.profile.models import Profile, RNewUserQueue from apps.statistics.models import MStatistics -class Users(View): +class Users(View): def get(self, request): last_year = datetime.datetime.utcnow() - datetime.timedelta(days=365) last_month = datetime.datetime.utcnow() - datetime.timedelta(days=30) - last_day = datetime.datetime.utcnow() - datetime.timedelta(minutes=60*24) - expiration_sec = 60*60 # 1 hour - + last_day = datetime.datetime.utcnow() - datetime.timedelta(minutes=60 * 24) + expiration_sec = 60 * 60 # 1 hour + data = { - 'all': MStatistics.get('munin:users_count', - lambda: User.objects.count(), - set_default=True, expiration_sec=expiration_sec), - 'yearly': MStatistics.get('munin:users_yearly', - lambda: Profile.objects.filter(last_seen_on__gte=last_year).count(), - set_default=True, expiration_sec=expiration_sec), - 'monthly': MStatistics.get('munin:users_monthly', - lambda: Profile.objects.filter(last_seen_on__gte=last_month).count(), - set_default=True, expiration_sec=expiration_sec), - 'daily': MStatistics.get('munin:users_daily', - lambda: Profile.objects.filter(last_seen_on__gte=last_day).count(), - set_default=True, expiration_sec=expiration_sec), - 'premium': MStatistics.get('munin:users_premium', - lambda: Profile.objects.filter(is_premium=True).count(), - set_default=True, expiration_sec=expiration_sec), - 'archive': MStatistics.get('munin:users_archive', - lambda: Profile.objects.filter(is_archive=True).count(), - set_default=True, expiration_sec=expiration_sec), - 'pro': MStatistics.get('munin:users_pro', - lambda: Profile.objects.filter(is_pro=True).count(), - set_default=True, expiration_sec=expiration_sec), - 'queued': MStatistics.get('munin:users_queued', - lambda: RNewUserQueue.user_count(), - set_default=True, expiration_sec=expiration_sec), + "all": MStatistics.get( + "munin:users_count", + lambda: User.objects.count(), + set_default=True, + expiration_sec=expiration_sec, + ), + "yearly": MStatistics.get( + "munin:users_yearly", + lambda: Profile.objects.filter(last_seen_on__gte=last_year).count(), + set_default=True, + expiration_sec=expiration_sec, + ), + "monthly": MStatistics.get( + "munin:users_monthly", + lambda: Profile.objects.filter(last_seen_on__gte=last_month).count(), + set_default=True, + expiration_sec=expiration_sec, + ), + "daily": MStatistics.get( + "munin:users_daily", + lambda: Profile.objects.filter(last_seen_on__gte=last_day).count(), + set_default=True, + expiration_sec=expiration_sec, + ), + "premium": MStatistics.get( + "munin:users_premium", + lambda: Profile.objects.filter(is_premium=True).count(), + set_default=True, + expiration_sec=expiration_sec, + ), + "archive": MStatistics.get( + "munin:users_archive", + lambda: Profile.objects.filter(is_archive=True).count(), + set_default=True, + expiration_sec=expiration_sec, + ), + "pro": MStatistics.get( + "munin:users_pro", + lambda: Profile.objects.filter(is_pro=True).count(), + 
set_default=True, + expiration_sec=expiration_sec, + ), + "queued": MStatistics.get( + "munin:users_queued", + lambda: RNewUserQueue.user_count(), + set_default=True, + expiration_sec=expiration_sec, + ), } chart_name = "users" chart_type = "counter" @@ -52,5 +76,4 @@ def get(self, request): "chart_name": chart_name, "chart_type": chart_type, } - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") - + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") diff --git a/apps/monitor/views/prometheus_redis.py b/apps/monitor/views/prometheus_redis.py index 8176287979..9c2486ce82 100644 --- a/apps/monitor/views/prometheus_redis.py +++ b/apps/monitor/views/prometheus_redis.py @@ -12,6 +12,7 @@ RedisSize """ + class RedisGrafanaMetric(View): category = "Redis" @@ -23,9 +24,9 @@ def autoconf(self): return True def get_info(self): - host = os.environ.get('REDIS_HOST') or '127.0.0.1' - port = int(os.environ.get('REDIS_PORT') or '6379') - if host.startswith('/'): + host = os.environ.get("REDIS_HOST") or "127.0.0.1" + port = int(os.environ.get("REDIS_PORT") or "6379") + if host.startswith("/"): s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) s.connect(host) else: @@ -33,9 +34,9 @@ def get_info(self): s.connect((host, port)) s.send("*1\r\n$4\r\ninfo\r\n") buf = "" - while '\r\n' not in buf: + while "\r\n" not in buf: buf += s.recv(1024) - l, buf = buf.split('\r\n', 1) + l, buf = buf.split("\r\n", 1) if l[0] != "$": s.close() raise Exception("Protocol error") @@ -43,7 +44,7 @@ def get_info(self): if remaining > 0: buf += s.recv(remaining) s.close() - return dict(x.split(':', 1) for x in buf.split('\r\n') if ':' in x) + return dict(x.split(":", 1) for x in buf.split("\r\n") if ":" in x) def execute(self): stats = self.get_info() @@ -57,25 +58,28 @@ def execute(self): return values def get_fields(self): - raise NotImplementedError('You must implement the get_fields function') + raise NotImplementedError("You must implement the get_fields function") def get_context(self): - raise NotImplementedError('You must implement the get_context function') - + raise NotImplementedError("You must implement the get_context function") + def get(self, request): context = self.get_context() - return render(request, 'monitor/prometheus_data.html', context, content_type="text/plain") + return render(request, "monitor/prometheus_data.html", context, content_type="text/plain") + class RedisActiveConnection(RedisGrafanaMetric): - def get_fields(self): return ( - ('connected_clients', dict( - label = "connections", - info = "connections", - type = "GAUGE", - )), + ( + "connected_clients", + dict( + label="connections", + info="connections", + type="GAUGE", + ), + ), ) def get_context(self): - raise NotImplementedError('You must implement the get_context function') + raise NotImplementedError("You must implement the get_context function") diff --git a/apps/newsletters/models.py b/apps/newsletters/models.py index b7bd880cd5..31950ac2d7 100644 --- a/apps/newsletters/models.py +++ b/apps/newsletters/models.py @@ -17,23 +17,23 @@ from utils.story_functions import linkify from utils.scrubber import Scrubber + class EmailNewsletter: - def receive_newsletter(self, params): - user = self._user_from_email(params['recipient']) + user = self._user_from_email(params["recipient"]) if not user: return - - sender_name, sender_username, sender_domain = self._split_sender(params['from']) + + sender_name, sender_username, sender_domain = 
self._split_sender(params["from"]) feed_address = self._feed_address(user, "%s@%s" % (sender_username, sender_domain)) - + try: usf = UserSubscriptionFolders.objects.get(user=user) except UserSubscriptionFolders.DoesNotExist: logging.user(user, "~FRUser does not have a USF, ignoring newsletter.") return - usf.add_folder('', 'Newsletters') - + usf.add_folder("", "Newsletters") + # First look for the email address try: feed = Feed.objects.get(feed_address=feed_address) @@ -46,45 +46,47 @@ def receive_newsletter(self, params): # If not found, check among titles user has subscribed to if not feed: - newsletter_subs = UserSubscription.objects.filter(user=user, feed__feed_address__contains="newsletter:").only('feed') + newsletter_subs = UserSubscription.objects.filter( + user=user, feed__feed_address__contains="newsletter:" + ).only("feed") newsletter_feed_ids = [us.feed.pk for us in newsletter_subs] feeds = Feed.objects.filter(feed_title__iexact=sender_name, pk__in=newsletter_feed_ids) if feeds.count(): feed = feeds[0] - + # Create a new feed if it doesn't exist by sender name or email if not feed: - feed = Feed.objects.create(feed_address=feed_address, - feed_link='http://' + sender_domain, - feed_title=sender_name, - fetched_once=True, - known_good=True) + feed = Feed.objects.create( + feed_address=feed_address, + feed_link="http://" + sender_domain, + feed_title=sender_name, + fetched_once=True, + known_good=True, + ) feed.update() logging.user(user, "~FCCreating newsletter feed: ~SB%s" % (feed)) r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(user.username, 'reload:%s' % feed.pk) + r.publish(user.username, "reload:%s" % feed.pk) self._check_if_first_newsletter(user) - + feed.last_update = datetime.datetime.now() feed.last_story_date = datetime.datetime.now() feed.save() - + if feed.feed_title != sender_name: feed.feed_title = sender_name feed.save() - + try: usersub = UserSubscription.objects.get(user=user, feed=feed) except UserSubscription.DoesNotExist: _, _, usersub = UserSubscription.add_subscription( - user=user, - feed_address=feed_address, - folder='Newsletters' + user=user, feed_address=feed_address, folder="Newsletters" ) r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(user.username, 'reload:feeds') - - story_hash = MStory.ensure_story_hash(params['signature'], feed.pk) + r.publish(user.username, "reload:feeds") + + story_hash = MStory.ensure_story_hash(params["signature"], feed.pk) story_content = self._get_content(params) plain_story_content = self._get_content(params, force_plain=True) if len(plain_story_content) > len(story_content): @@ -92,15 +94,16 @@ def receive_newsletter(self, params): story_content = self._clean_content(story_content) story_params = { "story_feed_id": feed.pk, - "story_date": datetime.datetime.fromtimestamp(int(params['timestamp'])), - "story_title": params['subject'], + "story_date": datetime.datetime.fromtimestamp(int(params["timestamp"])), + "story_title": params["subject"], "story_content": story_content, - "story_author_name": params['from'], - "story_permalink": "https://%s%s" % ( - Site.objects.get_current().domain, - reverse('newsletter-story', - kwargs={'story_hash': story_hash})), - "story_guid": params['signature'], + "story_author_name": params["from"], + "story_permalink": "https://%s%s" + % ( + Site.objects.get_current().domain, + reverse("newsletter-story", kwargs={"story_hash": story_hash}), + ), + "story_guid": params["signature"], } try: @@ -108,17 +111,17 @@ def receive_newsletter(self, 
params): except MStory.DoesNotExist: story = MStory(**story_params) story.save() - + usersub.needs_unread_recalc = True usersub.save() - + self._publish_to_subscribers(feed, story.story_hash) - - MFetchHistory.add(feed_id=feed.pk, fetch_type='push') + + MFetchHistory.add(feed_id=feed.pk, fetch_type="push") logging.user(user, "~FCNewsletter feed story: ~SB%s~SN / ~SB%s" % (story.story_title, feed)) - + return story - + def _check_if_first_newsletter(self, user, force=False): if not user.email: return @@ -129,10 +132,10 @@ def _check_if_first_newsletter(self, user, force=False): if sub.feed.is_newsletter: found_newsletter = True break - if not found_newsletter and not force: - return - - params = dict(receiver_user_id=user.pk, email_type='first_newsletter') + if not found_newsletter and not force: + return + + params = dict(receiver_user_id=user.pk, email_type="first_newsletter") try: MSentEmail.objects.get(**params) if not force: @@ -140,23 +143,26 @@ def _check_if_first_newsletter(self, user, force=False): return except MSentEmail.DoesNotExist: MSentEmail.objects.create(**params) - - text = render_to_string('mail/email_first_newsletter.txt', {}) - html = render_to_string('mail/email_first_newsletter.xhtml', {}) + + text = render_to_string("mail/email_first_newsletter.txt", {}) + html = render_to_string("mail/email_first_newsletter.xhtml", {}) subject = "Your email newsletters are now being sent to NewsBlur" - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - + logging.user(user, "~BB~FM~SBSending first newsletter email to: %s" % user.email) - + def _user_from_email(self, email): - tokens = re.search('(\w+)[\+\-\.](\w+)@newsletters.newsblur.com', email) + tokens = re.search("(\w+)[\+\-\.](\w+)@newsletters.newsblur.com", email) if not tokens: return - + username, secret_token = tokens.groups() try: profiles = Profile.objects.filter(secret_token=secret_token) @@ -165,55 +171,56 @@ def _user_from_email(self, email): profile = profiles[0] except Profile.DoesNotExist: return - + return profile.user - + def _feed_address(self, user, sender_email): - return 'newsletter:%s:%s' % (user.pk, sender_email) - + return "newsletter:%s:%s" % (user.pk, sender_email) + def _split_sender(self, sender): - tokens = re.search('(.*?) <(.*?)@(.*?)>', sender) + tokens = re.search("(.*?) 
<(.*?)@(.*?)>", sender)
         if not tokens:
-            name, domain = sender.split('@')
+            name, domain = sender.split("@")
             return name, sender, domain
-        
+
         sender_name, sender_username, sender_domain = tokens.group(1), tokens.group(2), tokens.group(3)
-        sender_name = sender_name.replace('"', '')
-        
+        sender_name = sender_name.replace('"', "")
+
         return sender_name, sender_username, sender_domain
-    
+
     def _get_content(self, params, force_plain=False):
-        if 'body-enriched' in params and not force_plain:
-            return params['body-enriched']
-        if 'body-html' in params and not force_plain:
-            return params['body-html']
-        if 'stripped-html' in params and not force_plain:
-            return params['stripped-html']
-        if 'body-plain' in params:
-            return linkify(linebreaks(params['body-plain']))
-        
+        if "body-enriched" in params and not force_plain:
+            return params["body-enriched"]
+        if "body-html" in params and not force_plain:
+            return params["body-html"]
+        if "stripped-html" in params and not force_plain:
+            return params["stripped-html"]
+        if "body-plain" in params:
+            return linkify(linebreaks(params["body-plain"]))
+
         if force_plain:
             return self._get_content(params, force_plain=False)
-    
+
     def _clean_content(self, content):
         original = content
         scrubber = Scrubber()
         content = scrubber.scrub(content)
-        if len(content) < len(original)*0.01:
+        if len(content) < len(original) * 0.01:
             content = original
-        content = content.replace('!important', '')
+        content = content.replace("!important", "")
         return content
-    
+
     def _publish_to_subscribers(self, feed, story_hash):
         try:
             r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
-            listeners_count = r.publish("%s:story" % feed.pk, 'story:new:%s' % story_hash)
+            listeners_count = r.publish("%s:story" % feed.pk, "story:new:%s" % story_hash)
             if listeners_count:
-                logging.debug("   ---> [%-30s] ~FMPublished to %s subscribers" % (feed.log_title[:30], listeners_count))
+                logging.debug(
+                    "   ---> [%-30s] ~FMPublished to %s subscribers" % (feed.log_title[:30], listeners_count)
+                )
         except redis.ConnectionError:
             logging.debug("   ***> [%-30s] ~BMRedis is unavailable for real-time." % (feed.log_title[:30],))
-        
+
         if MUserFeedNotification.feed_has_users(feed.pk) > 0:
             QueueNotifications.delay(feed.pk, 1)
-
\ No newline at end of file
diff --git a/apps/newsletters/urls.py b/apps/newsletters/urls.py
index 224e2cc095..e6263f1894 100644
--- a/apps/newsletters/urls.py
+++ b/apps/newsletters/urls.py
@@ -2,6 +2,6 @@
 from apps.newsletters import views
 
 urlpatterns = [
-    url(r'^receive/?$', views.newsletter_receive, name='newsletter-receive'),
-    url(r'^story/(?P<story_hash>[\w:]+)/?$', views.newsletter_story, name='newsletter-story'),
+    url(r"^receive/?$", views.newsletter_receive, name="newsletter-receive"),
+    url(r"^story/(?P<story_hash>[\w:]+)/?$", views.newsletter_story, name="newsletter-story"),
 ]
diff --git a/apps/newsletters/views.py b/apps/newsletters/views.py
index 4489127834..f4ed35b1b5 100644
--- a/apps/newsletters/views.py
+++ b/apps/newsletters/views.py
@@ -5,9 +5,10 @@
 from apps.newsletters.models import EmailNewsletter
 from apps.rss_feeds.models import Feed, MStory
 
+
 def newsletter_receive(request):
     """
-    This function is called by mailgun's receive email feature. This is a 
+    This function is called by mailgun's receive email feature. This is a
     private API used for the newsletter app.
""" # params = { @@ -42,24 +43,25 @@ def newsletter_receive(request): # 'Subject':'Test Newsletter theskimm' # } params = request.POST - - response = HttpResponse('OK') - - if settings.DEBUG or 'samuel' in params.get('To', ''): + + response = HttpResponse("OK") + + if settings.DEBUG or "samuel" in params.get("To", ""): logging.debug(" ---> Email newsletter: %s" % params) - + if not params or not len(params.keys()): logging.debug(" ***> Email newsletter blank body: %s" % request.body) raise Http404 - + email_newsletter = EmailNewsletter() story = email_newsletter.receive_newsletter(params) - + if not story: raise Http404 - + return response + def newsletter_story(request, story_hash): try: story = MStory.objects.get(story_hash=story_hash) @@ -67,4 +69,4 @@ def newsletter_story(request, story_hash): raise Http404 story = Feed.format_story(story) - return HttpResponse(story['story_content']) + return HttpResponse(story["story_content"]) diff --git a/apps/notifications/models.py b/apps/notifications/models.py index 253dab95e5..3542415f43 100644 --- a/apps/notifications/models.py +++ b/apps/notifications/models.py @@ -40,21 +40,21 @@ class NotificationFrequency(enum.Enum): class MUserNotificationTokens(mongo.Document): - '''A user's push notification tokens''' + """A user's push notification tokens""" user_id = mongo.IntField() ios_tokens = mongo.ListField(mongo.StringField(max_length=1024)) use_sandbox = mongo.BooleanField(default=False) meta = { - 'collection': 'notification_tokens', - 'indexes': [ + "collection": "notification_tokens", + "indexes": [ { - 'fields': ['user_id'], - 'unique': True, + "fields": ["user_id"], + "unique": True, } ], - 'allow_inheritance': False, + "allow_inheritance": False, } @classmethod @@ -68,7 +68,7 @@ def get_tokens_for_user(cls, user_id): class MUserFeedNotification(mongo.Document): - '''A user's notifications of a single feed.''' + """A user's notifications of a single feed.""" user_id = mongo.IntField() feed_id = mongo.IntField() @@ -82,32 +82,32 @@ class MUserFeedNotification(mongo.Document): ios_tokens = mongo.ListField(mongo.StringField(max_length=1024)) meta = { - 'collection': 'notifications', - 'indexes': [ - 'feed_id', + "collection": "notifications", + "indexes": [ + "feed_id", { - 'fields': ['user_id', 'feed_id'], - 'unique': True, + "fields": ["user_id", "feed_id"], + "unique": True, }, ], - 'allow_inheritance': False, + "allow_inheritance": False, } def __str__(self): notification_types = [] if self.is_email: - notification_types.append('email') + notification_types.append("email") if self.is_web: - notification_types.append('web') + notification_types.append("web") if self.is_ios: - notification_types.append('ios') + notification_types.append("ios") if self.is_android: - notification_types.append('android') + notification_types.append("android") return "%s/%s: %s -> %s" % ( User.objects.get(pk=self.user_id).username, Feed.get_by_id(self.feed_id), - ','.join(notification_types), + ",".join(notification_types), self.last_notification_date, ) @@ -128,17 +128,17 @@ def feeds_for_user(cls, user_id): for feed in notifications: notifications_by_feed[feed.feed_id] = { - 'notification_types': [], - 'notification_filter': "focus" if feed.is_focus else "unread", + "notification_types": [], + "notification_filter": "focus" if feed.is_focus else "unread", } if feed.is_email: - notifications_by_feed[feed.feed_id]['notification_types'].append('email') + notifications_by_feed[feed.feed_id]["notification_types"].append("email") if feed.is_web: - 
notifications_by_feed[feed.feed_id]['notification_types'].append('web') + notifications_by_feed[feed.feed_id]["notification_types"].append("web") if feed.is_ios: - notifications_by_feed[feed.feed_id]['notification_types'].append('ios') + notifications_by_feed[feed.feed_id]["notification_types"].append("ios") if feed.is_android: - notifications_by_feed[feed.feed_id]['notification_types'].append('android') + notifications_by_feed[feed.feed_id]["notification_types"].append("android") return notifications_by_feed @@ -153,7 +153,7 @@ def push_feed_notifications(cls, feed_id, new_stories, force=False): r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) latest_story_hashes = r.zrange("zF:%s" % feed.pk, -1 * new_stories, -1) - mstories = MStory.objects.filter(story_hash__in=latest_story_hashes).order_by('-story_date') + mstories = MStory.objects.filter(story_hash__in=latest_story_hashes).order_by("-story_date") stories = Feed.format_stories(mstories) total_sent_count = 0 @@ -186,19 +186,19 @@ def push_feed_notifications(cls, feed_id, new_stories, force=False): if settings.DEBUG: logging.debug("Sent too many, ignoring...") continue - if story['story_date'] <= last_notification_date and not force: + if story["story_date"] <= last_notification_date and not force: if settings.DEBUG: logging.debug( "Story date older than last notification date: %s <= %s" - % (story['story_date'], last_notification_date) + % (story["story_date"], last_notification_date) ) continue - if story['story_date'] > user_feed_notification.last_notification_date: - user_feed_notification.last_notification_date = story['story_date'] + if story["story_date"] > user_feed_notification.last_notification_date: + user_feed_notification.last_notification_date = story["story_date"] user_feed_notification.save() - story['story_content'] = html.unescape(story['story_content']) + story["story_content"] = html.unescape(story["story_content"]) sent = user_feed_notification.push_story_notification(story, classifiers, usersub) if sent: @@ -209,49 +209,40 @@ def push_feed_notifications(cls, feed_id, new_stories, force=False): def classifiers(self, usersub): classifiers = {} if usersub.is_trained: - classifiers['feeds'] = list( - MClassifierFeed.objects( - user_id=self.user_id, feed_id=self.feed_id, social_user_id=0 - ) + classifiers["feeds"] = list( + MClassifierFeed.objects(user_id=self.user_id, feed_id=self.feed_id, social_user_id=0) ) - classifiers['authors'] = list( + classifiers["authors"] = list( MClassifierAuthor.objects(user_id=self.user_id, feed_id=self.feed_id) ) - classifiers['titles'] = list( - MClassifierTitle.objects(user_id=self.user_id, feed_id=self.feed_id) - ) - classifiers['tags'] = list( - MClassifierTag.objects(user_id=self.user_id, feed_id=self.feed_id) - ) + classifiers["titles"] = list(MClassifierTitle.objects(user_id=self.user_id, feed_id=self.feed_id)) + classifiers["tags"] = list(MClassifierTag.objects(user_id=self.user_id, feed_id=self.feed_id)) return classifiers def title_and_body(self, story, usersub, notification_title_only=False): def replace_with_newlines(element): - text = '' + text = "" for elem in element.recursiveChildGenerator(): if isinstance(elem, (str,)): text += elem - elif elem.name == 'br': - text += '\n' - elif elem.name == 'p': - text += '\n\n' - text = re.sub(r' +', ' ', text).strip() + elif elem.name == "br": + text += "\n" + elif elem.name == "p": + text += "\n\n" + text = re.sub(r" +", " ", text).strip() return text feed_title = usersub.user_title or usersub.feed.feed_title # title = 
"%s: %s" % (feed_title, story['story_title']) title = feed_title - soup = BeautifulSoup(story['story_content'].strip(), features="lxml") + soup = BeautifulSoup(story["story_content"].strip(), features="lxml") # if notification_title_only: subtitle = None - body_title = html.unescape(story['story_title']).strip() + body_title = html.unescape(story["story_title"]).strip() body_content = replace_with_newlines(soup) if body_content: - if ( - body_title == body_content[: len(body_title)] - or body_content[:100] == body_title[:100] - ): + if body_title == body_content[: len(body_title)] or body_content[:100] == body_title[:100]: body_content = "" else: body_content = f"\n※ {body_content}" @@ -283,7 +274,7 @@ def push_story_notification(self, story, classifiers, usersub): logging.user( user, "~FCSending push notification: %s/%s (score: %s)" - % (story['story_title'][:40], story['story_hash'], story_score), + % (story["story_title"][:40], story["story_hash"], story_score), ) self.send_web(story, user) @@ -298,7 +289,7 @@ def send_web(self, story, user): return r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(user.username, 'notification:%s,%s' % (story['story_hash'], story['story_title'])) + r.publish(user.username, "notification:%s,%s" % (story["story_hash"], story["story_title"])) def send_ios(self, story, user, usersub): if not self.is_ios: @@ -319,45 +310,42 @@ def send_ios(self, story, user, usersub): # 7. cat aps.pem aps_key.noenc.pem > aps.p12.pem # 8. Verify: openssl s_client -connect gateway.push.apple.com:2195 -cert aps.p12.pem # 9. Deploy: aps -l work -t apns,repo,celery - apns = APNsClient( - '/srv/newsblur/config/certificates/aps.p12.pem', use_sandbox=tokens.use_sandbox - ) + apns = APNsClient("/srv/newsblur/config/certificates/aps.p12.pem", use_sandbox=tokens.use_sandbox) - notification_title_only = is_true(user.profile.preference_value('notification_title_only')) + notification_title_only = is_true(user.profile.preference_value("notification_title_only")) title, subtitle, body = self.title_and_body(story, usersub, notification_title_only) image_url = None - if len(story['image_urls']): - image_url = story['image_urls'][0] + if len(story["image_urls"]): + image_url = story["image_urls"][0] # print image_url confirmed_ios_tokens = [] for token in tokens.ios_tokens: logging.user( user, - '~BMStory notification by iOS: ~FY~SB%s~SN~BM~FY/~SB%s' - % (story['story_title'][:50], usersub.feed.feed_title[:50]), + "~BMStory notification by iOS: ~FY~SB%s~SN~BM~FY/~SB%s" + % (story["story_title"][:50], usersub.feed.feed_title[:50]), ) payload = Payload( - alert={'title': title, 'subtitle': subtitle, 'body': body}, + alert={"title": title, "subtitle": subtitle, "body": body}, category="STORY_CATEGORY", mutable_content=True, custom={ - 'story_hash': story['story_hash'], - 'story_feed_id': story['story_feed_id'], - 'image_url': image_url, + "story_hash": story["story_hash"], + "story_feed_id": story["story_feed_id"], + "image_url": image_url, }, ) try: apns.send_notification(token, payload, topic="com.newsblur.NewsBlur") except (BadDeviceToken, Unregistered, DeviceTokenNotForTopic): - logging.user(user, '~BMiOS token expired: ~FR~SB%s' % (token[:50])) + logging.user(user, "~BMiOS token expired: ~FR~SB%s" % (token[:50])) else: confirmed_ios_tokens.append(token) if settings.DEBUG: logging.user( user, - '~BMiOS token good: ~FB~SB%s / %s' - % (token[:50], len(confirmed_ios_tokens)), + "~BMiOS token good: ~FB~SB%s / %s" % (token[:50], len(confirmed_ios_tokens)), ) if 
len(confirmed_ios_tokens) < len(tokens.ios_tokens): @@ -379,11 +367,14 @@ def send_email(self, story, usersub): r.expire(emails_sent_date_key, 60 * 60 * 24) # Keep for a day count = int(r.hget(emails_sent_date_key, usersub.user_id) or 0) if count > settings.MAX_EMAILS_SENT_PER_DAY_PER_USER: - logging.user(usersub.user, "~BMSent too many email Story notifications by email: ~FR~SB%s~SN~FR emails" % (count)) + logging.user( + usersub.user, + "~BMSent too many email Story notifications by email: ~FR~SB%s~SN~FR emails" % (count), + ) return feed = usersub.feed - story_content = self.sanitize_story(story['story_content']) + story_content = self.sanitize_story(story["story_content"]) params = { "story": story, @@ -392,14 +383,14 @@ def send_email(self, story, usersub): "feed_title": usersub.user_title or feed.feed_title, "favicon_border": feed.favicon_color, } - from_address = 'notifications@newsblur.com' - to_address = '%s <%s>' % (usersub.user.username, usersub.user.email) - text = render_to_string('mail/email_story_notification.txt', params) - html = render_to_string('mail/email_story_notification.xhtml', params) - subject = '%s: %s' % (usersub.user_title or usersub.feed.feed_title, story['story_title']) - subject = subject.replace('\n', ' ') + from_address = "notifications@newsblur.com" + to_address = "%s <%s>" % (usersub.user.username, usersub.user.email) + text = render_to_string("mail/email_story_notification.txt", params) + html = render_to_string("mail/email_story_notification.xhtml", params) + subject = "%s: %s" % (usersub.user_title or usersub.feed.feed_title, story["story_title"]) + subject = subject.replace("\n", " ") msg = EmailMultiAlternatives( - subject, text, from_email='NewsBlur <%s>' % from_address, to=[to_address] + subject, text, from_email="NewsBlur <%s>" % from_address, to=[to_address] ) msg.attach_alternative(html, "text/html") # try: @@ -409,8 +400,8 @@ def send_email(self, story, usersub): # return logging.user( usersub.user, - '~BMStory notification by email: ~FY~SB%s~SN~BM~FY/~SB%s' - % (story['story_title'][:50], usersub.feed.feed_title[:50]), + "~BMStory notification by email: ~FY~SB%s~SN~BM~FY/~SB%s" + % (story["story_title"][:50], usersub.feed.feed_title[:50]), ) def sanitize_story(self, story_content): @@ -419,15 +410,15 @@ def sanitize_story(self, story_content): # Convert videos in newsletters to images for iframe in soup("iframe"): - url = dict(iframe.attrs).get('src', "") + url = dict(iframe.attrs).get("src", "") youtube_id = self.extract_youtube_id(url) if youtube_id: - a = soup.new_tag('a', href=url) + a = soup.new_tag("a", href=url) img = soup.new_tag( - 'img', + "img", style="display: block; 'background-image': \"url(https://%s/img/reader/youtube_play.png), url(http://img.youtube.com/vi/%s/0.jpg)\"" % (fqdn, youtube_id), - src='http://img.youtube.com/vi/%s/0.jpg' % youtube_id, + src="http://img.youtube.com/vi/%s/0.jpg" % youtube_id, ) a.insert(0, img) iframe.replaceWith(a) @@ -439,20 +430,20 @@ def sanitize_story(self, story_content): def extract_youtube_id(self, url): youtube_id = None - if 'youtube.com' in url: + if "youtube.com" in url: youtube_parts = urllib.parse.urlparse(url) - if '/embed/' in youtube_parts.path: - youtube_id = youtube_parts.path.replace('/embed/', '') + if "/embed/" in youtube_parts.path: + youtube_id = youtube_parts.path.replace("/embed/", "") return youtube_id def story_score(self, story, classifiers): score = compute_story_score( story, - classifier_titles=classifiers.get('titles', []), - 
classifier_authors=classifiers.get('authors', []), - classifier_tags=classifiers.get('tags', []), - classifier_feeds=classifiers.get('feeds', []), + classifier_titles=classifiers.get("titles", []), + classifier_authors=classifiers.get("authors", []), + classifier_tags=classifiers.get("tags", []), + classifier_feeds=classifiers.get("feeds", []), ) return score diff --git a/apps/notifications/urls.py b/apps/notifications/urls.py index a304edb6f9..53bd617fd2 100644 --- a/apps/notifications/urls.py +++ b/apps/notifications/urls.py @@ -3,9 +3,9 @@ from oauth2_provider import views as op_views urlpatterns = [ - url(r'^$', views.notifications_by_feed, name='notifications-by-feed'), - url(r'^feed/?$', views.set_notifications_for_feed, name='set-notifications-for-feed'), - url(r'^apns_token/?$', views.set_apns_token, name='set-apns-token'), - url(r'^android_token/?$', views.set_android_token, name='set-android-token'), - url(r'^force_push/?$', views.force_push, name='force-push-notification'), -] \ No newline at end of file + url(r"^$", views.notifications_by_feed, name="notifications-by-feed"), + url(r"^feed/?$", views.set_notifications_for_feed, name="set-notifications-for-feed"), + url(r"^apns_token/?$", views.set_apns_token, name="set-apns-token"), + url(r"^android_token/?$", views.set_android_token, name="set-android-token"), + url(r"^force_push/?$", views.force_push, name="force-push-notification"), +] diff --git a/apps/notifications/views.py b/apps/notifications/views.py index b31315f579..8c26622782 100644 --- a/apps/notifications/views.py +++ b/apps/notifications/views.py @@ -17,82 +17,90 @@ def notifications_by_feed(request): return notifications_by_feed + @ajax_login_required @json.json_view def set_notifications_for_feed(request): user = get_user(request) - feed_id = request.POST['feed_id'] - notification_types = request.POST.getlist('notification_types') or request.POST.getlist('notification_types[]') - notification_filter = request.POST.get('notification_filter') - + feed_id = request.POST["feed_id"] + notification_types = request.POST.getlist("notification_types") or request.POST.getlist( + "notification_types[]" + ) + notification_filter = request.POST.get("notification_filter") + try: notification = MUserFeedNotification.objects.get(user_id=user.pk, feed_id=feed_id) except MUserFeedNotification.DoesNotExist: params = { - "user_id": user.pk, + "user_id": user.pk, "feed_id": feed_id, } notification = MUserFeedNotification.objects.create(**params) - + web_was_off = not notification.is_web notification.is_focus = bool(notification_filter == "focus") - notification.is_email = bool('email' in notification_types) - notification.is_ios = bool('ios' in notification_types) - notification.is_android = bool('android' in notification_types) - notification.is_web = bool('web' in notification_types) + notification.is_email = bool("email" in notification_types) + notification.is_ios = bool("ios" in notification_types) + notification.is_android = bool("android" in notification_types) + notification.is_web = bool("web" in notification_types) notification.save() - - if (not notification.is_email and - not notification.is_ios and - not notification.is_android and - not notification.is_web): + + if ( + not notification.is_email + and not notification.is_ios + and not notification.is_android + and not notification.is_web + ): notification.delete() - + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) if web_was_off and notification.is_web: - r.publish(user.username, 'notification:setup:%s' % 
feed_id) - + r.publish(user.username, "notification:setup:%s" % feed_id) + notifications_by_feed = MUserFeedNotification.feeds_for_user(user.pk) return {"notifications_by_feed": notifications_by_feed} + @ajax_login_required @json.json_view def set_apns_token(request): """ - Apple Push Notification Service, token is sent by the iOS app. Used to send + Apple Push Notification Service, token is sent by the iOS app. Used to send push notifications to iOS. """ user = get_user(request) tokens = MUserNotificationTokens.get_tokens_for_user(user.pk) - apns_token = request.POST['apns_token'] - + apns_token = request.POST["apns_token"] + logging.user(user, "~FCUpdating APNS push token") if apns_token not in tokens.ios_tokens: tokens.ios_tokens.append(apns_token) tokens.save() - return {'message': 'Token saved.'} - - return {'message': 'Token already saved.'} + return {"message": "Token saved."} + + return {"message": "Token already saved."} + @ajax_login_required @json.json_view def set_android_token(request): """ - Android's push notification tokens. Not sure why I can't find this function in + Android's push notification tokens. Not sure why I can't find this function in the Android code. """ user = get_user(request) tokens = MUserNotificationTokens.get_tokens_for_user(user.pk) - token = request.POST['token'] - + token = request.POST["token"] + logging.user(user, "~FCUpdating Android push token") if token not in tokens.android_tokens: tokens.android_tokens.append(token) tokens.save() - return {'message': 'Token saved.'} - - return {'message': 'Token already saved.'} + return {"message": "Token saved."} + + return {"message": "Token already saved."} + @required_params(feed_id=int) @staff_member_required @@ -102,10 +110,12 @@ def force_push(request): Intended to force a push notification for a feed for testing. Handier than the console. """ user = get_user(request) - feed_id = request.GET['feed_id'] - count = int(request.GET.get('count', 1)) - + feed_id = request.GET["feed_id"] + count = int(request.GET.get("count", 1)) + logging.user(user, "~BM~FWForce pushing %s stories: ~SB%s" % (count, Feed.get_by_id(feed_id))) - sent_count, user_count = MUserFeedNotification.push_feed_notifications(feed_id, new_stories=count, force=True) - - return {"message": "Pushed %s notifications to %s users" % (sent_count, user_count)} \ No newline at end of file + sent_count, user_count = MUserFeedNotification.push_feed_notifications( + feed_id, new_stories=count, force=True + ) + + return {"message": "Pushed %s notifications to %s users" % (sent_count, user_count)} diff --git a/apps/oauth/models.py b/apps/oauth/models.py index c6ff4f1c43..c1522cf114 100644 --- a/apps/oauth/models.py +++ b/apps/oauth/models.py @@ -1 +1 @@ -# No models for OAuth. Use MSocialServices model in social. \ No newline at end of file +# No models for OAuth. Use MSocialServices model in social. 
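
For orientation between files: the notification endpoints reformatted above are thin JSON wrappers around MUserFeedNotification. The sketch below (not part of the patch) shows the request shape set_notifications_for_feed expects; the /notifications/ mount point, the "samuel" user fixture, and feed id 42 are illustrative assumptions, not taken from this diff.

    # Sketch only: drives the set_notifications_for_feed view patched above.
    # Mount point, credentials, and feed id are hypothetical.
    from django.test import Client

    client = Client()
    client.login(username="samuel", password="hypothetical-password")

    response = client.post(
        "/notifications/feed/",
        {
            "feed_id": 42,
            "notification_types": ["ios", "email"],  # read via request.POST.getlist()
            "notification_filter": "focus",  # anything other than "focus" means "unread"
        },
    )
    print(response.content)  # JSON body containing notifications_by_feed
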
diff --git a/apps/oauth/urls.py b/apps/oauth/urls.py
index 66742e5cf0..7dcb326e1c 100644
--- a/apps/oauth/urls.py
+++ b/apps/oauth/urls.py
@@ -3,33 +3,43 @@ from oauth2_provider import views as op_views
 
 urlpatterns = [
-    url(r'^twitter_connect/?$', views.twitter_connect, name='twitter-connect'),
-    url(r'^facebook_connect/?$', views.facebook_connect, name='facebook-connect'),
-    url(r'^twitter_disconnect/?$', views.twitter_disconnect, name='twitter-disconnect'),
-    url(r'^facebook_disconnect/?$', views.facebook_disconnect, name='facebook-disconnect'),
-    url(r'^follow_twitter_account/?$', views.follow_twitter_account, name='social-follow-twitter'),
-    url(r'^unfollow_twitter_account/?$', views.unfollow_twitter_account, name='social-unfollow-twitter'),
-    
+    url(r"^twitter_connect/?$", views.twitter_connect, name="twitter-connect"),
+    url(r"^facebook_connect/?$", views.facebook_connect, name="facebook-connect"),
+    url(r"^twitter_disconnect/?$", views.twitter_disconnect, name="twitter-disconnect"),
+    url(r"^facebook_disconnect/?$", views.facebook_disconnect, name="facebook-disconnect"),
+    url(r"^follow_twitter_account/?$", views.follow_twitter_account, name="social-follow-twitter"),
+    url(r"^unfollow_twitter_account/?$", views.unfollow_twitter_account, name="social-unfollow-twitter"),
     # Django OAuth Toolkit
-    url(r'^status/?$', views.ifttt_status, name="ifttt-status"),
-    url(r'^authorize/?$', op_views.AuthorizationView.as_view(), name="oauth-authorize"),
-    url(r'^token/?$', op_views.TokenView.as_view(), name="oauth-token"),
-    url(r'^oauth2/authorize/?$', op_views.AuthorizationView.as_view(), name="ifttt-authorize"),
-    url(r'^oauth2/token/?$', op_views.TokenView.as_view(), name="ifttt-token"),
-    url(r'^user/info/?$', views.api_user_info, name="ifttt-user-info"),
-    url(r'^triggers/(?P<trigger_slug>new-unread-(focus-)?story)/fields/feed_or_folder/options/?$',
-        views.api_feed_list, name="ifttt-trigger-feedlist"),
-    url(r'^triggers/(?P<trigger_slug>new-unread-(focus-)?story)/?$',
-        views.api_unread_story, name="ifttt-trigger-unreadstory"),
-    url(r'^triggers/new-saved-story/fields/story_tag/options/?$',
-        views.api_saved_tag_list, name="ifttt-trigger-taglist"),
-    url(r'^triggers/new-saved-story/?$', views.api_saved_story, name="ifttt-trigger-saved"),
-    url(r'^triggers/new-shared-story/fields/blurblog_user/options/?$',
-        views.api_shared_usernames, name="ifttt-trigger-blurbloglist"),
-    url(r'^triggers/new-shared-story/?$', views.api_shared_story, name="ifttt-trigger-shared"),
-    url(r'^actions/share-story/?$', views.api_share_new_story, name="ifttt-action-share"),
-    url(r'^actions/save-story/?$', views.api_save_new_story, name="ifttt-action-saved"),
-    url(r'^actions/add-site/?$', views.api_save_new_subscription, name="ifttt-action-subscription"),
-    url(r'^actions/add-site/fields/folder/options/?$',
-        views.api_folder_list, name="ifttt-action-folderlist"),
+    url(r"^status/?$", views.ifttt_status, name="ifttt-status"),
+    url(r"^authorize/?$", op_views.AuthorizationView.as_view(), name="oauth-authorize"),
+    url(r"^token/?$", op_views.TokenView.as_view(), name="oauth-token"),
+    url(r"^oauth2/authorize/?$", op_views.AuthorizationView.as_view(), name="ifttt-authorize"),
+    url(r"^oauth2/token/?$", op_views.TokenView.as_view(), name="ifttt-token"),
+    url(r"^user/info/?$", views.api_user_info, name="ifttt-user-info"),
+    url(
+        r"^triggers/(?P<trigger_slug>new-unread-(focus-)?story)/fields/feed_or_folder/options/?$",
+        views.api_feed_list,
+        name="ifttt-trigger-feedlist",
+    ),
+    url(
+        r"^triggers/(?P<trigger_slug>new-unread-(focus-)?story)/?$",
views.api_unread_story, + name="ifttt-trigger-unreadstory", + ), + url( + r"^triggers/new-saved-story/fields/story_tag/options/?$", + views.api_saved_tag_list, + name="ifttt-trigger-taglist", + ), + url(r"^triggers/new-saved-story/?$", views.api_saved_story, name="ifttt-trigger-saved"), + url( + r"^triggers/new-shared-story/fields/blurblog_user/options/?$", + views.api_shared_usernames, + name="ifttt-trigger-blurbloglist", + ), + url(r"^triggers/new-shared-story/?$", views.api_shared_story, name="ifttt-trigger-shared"), + url(r"^actions/share-story/?$", views.api_share_new_story, name="ifttt-action-share"), + url(r"^actions/save-story/?$", views.api_save_new_story, name="ifttt-action-saved"), + url(r"^actions/add-site/?$", views.api_save_new_subscription, name="ifttt-action-subscription"), + url(r"^actions/add-site/fields/folder/options/?$", views.api_folder_list, name="ifttt-action-folderlist"), ] diff --git a/apps/oauth/views.py b/apps/oauth/views.py index 3c436f2579..c4374b21ed 100644 --- a/apps/oauth/views.py +++ b/apps/oauth/views.py @@ -24,22 +24,23 @@ from utils import json_functions as json from vendor import facebook + @login_required -@render_to('social/social_connect.xhtml') +@render_to("social/social_connect.xhtml") def twitter_connect(request): twitter_consumer_key = settings.TWITTER_CONSUMER_KEY twitter_consumer_secret = settings.TWITTER_CONSUMER_SECRET - - oauth_token = request.GET.get('oauth_token') - oauth_verifier = request.GET.get('oauth_verifier') - denied = request.GET.get('denied') + + oauth_token = request.GET.get("oauth_token") + oauth_verifier = request.GET.get("oauth_verifier") + denied = request.GET.get("denied") if denied: logging.user(request, "~BB~FRDenied Twitter connect") - return {'error': 'Denied! Try connecting again.'} + return {"error": "Denied! Try connecting again."} elif oauth_token and oauth_verifier: try: auth = tweepy.OAuthHandler(twitter_consumer_key, twitter_consumer_secret) - auth.request_token = request.session['twitter_request_token'] + auth.request_token = request.session["twitter_request_token"] # auth.set_request_token(oauth_token, oauth_verifier) auth.get_access_token(oauth_verifier) api = tweepy.API(auth) @@ -54,9 +55,13 @@ def twitter_connect(request): try: user = User.objects.get(pk=existing_user[0].user_id) logging.user(request, "~BB~FRFailed Twitter connect, another user: %s" % user.username) - return dict(error=("Another user (%s, %s) has " - "already connected with those Twitter credentials." - % (user.username, user.email or "no email"))) + return dict( + error=( + "Another user (%s, %s) has " + "already connected with those Twitter credentials." 
+ % (user.username, user.email or "no email") + ) + ) except User.DoesNotExist: existing_user.delete() @@ -68,42 +73,43 @@ def twitter_connect(request): social_services.save() SyncTwitterFriends.delay(user_id=request.user.pk) - + logging.user(request, "~BB~FRFinishing Twitter connect") return {} else: # Start the OAuth process auth = tweepy.OAuthHandler(twitter_consumer_key, twitter_consumer_secret) auth_url = auth.get_authorization_url() - request.session['twitter_request_token'] = auth.request_token + request.session["twitter_request_token"] = auth.request_token logging.user(request, "~BB~FRStarting Twitter connect: %s" % auth.request_token) - return {'next': auth_url} + return {"next": auth_url} @login_required -@render_to('social/social_connect.xhtml') +@render_to("social/social_connect.xhtml") def facebook_connect(request): facebook_app_id = settings.FACEBOOK_APP_ID facebook_secret = settings.FACEBOOK_SECRET - + args = { "client_id": facebook_app_id, - "redirect_uri": "https://" + Site.objects.get_current().domain + '/oauth/facebook_connect', + "redirect_uri": "https://" + Site.objects.get_current().domain + "/oauth/facebook_connect", "scope": "user_friends", "display": "popup", } - verification_code = request.GET.get('code') + verification_code = request.GET.get("code") if verification_code: args["client_secret"] = facebook_secret args["code"] = verification_code - uri = "https://graph.facebook.com/oauth/access_token?" + \ - urllib.parse.urlencode(args) + uri = "https://graph.facebook.com/oauth/access_token?" + urllib.parse.urlencode(args) response_text = urllib.request.urlopen(uri).read() response = json.decode(response_text) - + if "access_token" not in response: - logging.user(request, "~BB~FRFailed Facebook connect, no access_token. (%s): %s" % (args, response)) + logging.user( + request, "~BB~FRFailed Facebook connect, no access_token. (%s): %s" % (args, response) + ) return dict(error="Facebook has returned an error. Try connecting again.") access_token = response["access_token"] @@ -119,9 +125,13 @@ def facebook_connect(request): try: user = User.objects.get(pk=existing_user[0].user_id) logging.user(request, "~BB~FRFailed FB connect, another user: %s" % user.username) - return dict(error=("Another user (%s, %s) has " - "already connected with those Facebook credentials." - % (user.username, user.email or "no email"))) + return dict( + error=( + "Another user (%s, %s) has " + "already connected with those Facebook credentials." + % (user.username, user.email or "no email") + ) + ) except User.DoesNotExist: existing_user.delete() @@ -130,48 +140,51 @@ def facebook_connect(request): social_services.facebook_access_token = access_token social_services.syncing_facebook = True social_services.save() - + SyncFacebookFriends.delay(user_id=request.user.pk) - + logging.user(request, "~BB~FRFinishing Facebook connect") return {} - elif request.GET.get('error'): - logging.user(request, "~BB~FRFailed Facebook connect, error: %s" % request.GET.get('error')) - return {'error': '%s... Try connecting again.' % request.GET.get('error')} + elif request.GET.get("error"): + logging.user(request, "~BB~FRFailed Facebook connect, error: %s" % request.GET.get("error")) + return {"error": "%s... Try connecting again." % request.GET.get("error")} else: # Start the OAuth process logging.user(request, "~BB~FRStarting Facebook connect") url = "https://www.facebook.com/dialog/oauth?" 
+ urllib.parse.urlencode(args) - return {'next': url} + return {"next": url} + @ajax_login_required def twitter_disconnect(request): logging.user(request, "~BB~FRDisconnecting Twitter") social_services = MSocialServices.objects.get(user_id=request.user.pk) social_services.disconnect_twitter() - - return HttpResponseRedirect(reverse('load-user-friends')) + + return HttpResponseRedirect(reverse("load-user-friends")) + @ajax_login_required def facebook_disconnect(request): logging.user(request, "~BB~FRDisconnecting Facebook") social_services = MSocialServices.objects.get(user_id=request.user.pk) social_services.disconnect_facebook() - - return HttpResponseRedirect(reverse('load-user-friends')) - + + return HttpResponseRedirect(reverse("load-user-friends")) + + @ajax_login_required @json.json_view def follow_twitter_account(request): - username = request.POST['username'] + username = request.POST["username"] code = 1 message = "OK" - + logging.user(request, "~BB~FR~SKFollowing Twitter: %s" % username) - - if username not in ['samuelclay', 'newsblur']: + + if username not in ["samuelclay", "newsblur"]: return HttpResponseForbidden() - + social_services = MSocialServices.objects.get(user_id=request.user.pk) try: api = social_services.twitter_api() @@ -179,21 +192,22 @@ def follow_twitter_account(request): except tweepy.TweepError as e: code = -1 message = e - - return {'code': code, 'message': message} - + + return {"code": code, "message": message} + + @ajax_login_required @json.json_view def unfollow_twitter_account(request): - username = request.POST['username'] + username = request.POST["username"] code = 1 message = "OK" - + logging.user(request, "~BB~FRUnfollowing Twitter: %s" % username) - - if username not in ['samuelclay', 'newsblur']: + + if username not in ["samuelclay", "newsblur"]: return HttpResponseForbidden() - + social_services = MSocialServices.objects.get(user_id=request.user.pk) try: api = social_services.twitter_api() @@ -201,18 +215,25 @@ def unfollow_twitter_account(request): except tweepy.TweepError as e: code = -1 message = e - - return {'code': code, 'message': message} + + return {"code": code, "message": message} + @oauth_login_required def api_user_info(request): user = request.user - - return json.json_response(request, {"data": { - "name": user.username, - "id": user.pk, - }}) - + + return json.json_response( + request, + { + "data": { + "name": user.username, + "id": user.pk, + } + }, + ) + + @oauth_login_required @json.json_view def api_feed_list(request, trigger_slug=None): @@ -220,18 +241,16 @@ def api_feed_list(request, trigger_slug=None): try: usf = UserSubscriptionFolders.objects.get(user=user) except UserSubscriptionFolders.DoesNotExist: - return {"errors": [{ - 'message': 'Could not find feeds for user.' 
- }]} + return {"errors": [{"message": "Could not find feeds for user."}]} flat_folders = usf.flatten_folders() titles = [dict(label=" - Folder: All Site Stories", value="all")] feeds = {} - - user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True) - + + user_subs = UserSubscription.objects.select_related("feed").filter(user=user, active=True) + for sub in user_subs: feeds[sub.feed_id] = sub.canonical() - + for folder_title in sorted(flat_folders.keys()): if folder_title and folder_title != " ": titles.append(dict(label=" - Folder: %s" % folder_title, value=folder_title, optgroup=True)) @@ -239,53 +258,62 @@ def api_feed_list(request, trigger_slug=None): titles.append(dict(label=" - Folder: Top Level", value="Top Level", optgroup=True)) folder_contents = [] for feed_id in flat_folders[folder_title]: - if feed_id not in feeds: continue + if feed_id not in feeds: + continue feed = feeds[feed_id] - folder_contents.append(dict(label=feed['feed_title'], value=str(feed['id']))) - folder_contents = sorted(folder_contents, key=lambda f: f['label'].lower()) + folder_contents.append(dict(label=feed["feed_title"], value=str(feed["id"]))) + folder_contents = sorted(folder_contents, key=lambda f: f["label"].lower()) titles.extend(folder_contents) - + return {"data": titles} - + + @oauth_login_required @json.json_view def api_folder_list(request, trigger_slug=None): user = request.user usf = UserSubscriptionFolders.objects.get(user=user) flat_folders = usf.flatten_folders() - if 'add-new-subscription' in request.path: + if "add-new-subscription" in request.path: titles = [] else: titles = [dict(label="All Site Stories", value="all")] - + for folder_title in sorted(flat_folders.keys()): if folder_title and folder_title != " ": titles.append(dict(label=folder_title, value=folder_title)) else: titles.append(dict(label="Top Level", value="Top Level")) - + return {"data": titles} + @oauth_login_required @json.json_view def api_saved_tag_list(request): user = request.user starred_counts, starred_count = MStarredStoryCounts.user_counts(user.pk, include_total=True) tags = [] - + for tag in starred_counts: - if not tag['tag'] or tag['tag'] == "": continue - tags.append(dict(label="%s (%s %s)" % (tag['tag'], tag['count'], - 'story' if tag['count'] == 1 else 'stories'), - value=tag['tag'])) - tags = sorted(tags, key=lambda t: t['value'].lower()) - catchall = dict(label="All Saved Stories (%s %s)" % (starred_count, - 'story' if starred_count == 1 else 'stories'), - value="all") + if not tag["tag"] or tag["tag"] == "": + continue + tags.append( + dict( + label="%s (%s %s)" % (tag["tag"], tag["count"], "story" if tag["count"] == 1 else "stories"), + value=tag["tag"], + ) + ) + tags = sorted(tags, key=lambda t: t["value"].lower()) + catchall = dict( + label="All Saved Stories (%s %s)" % (starred_count, "story" if starred_count == 1 else "stories"), + value="all", + ) tags.insert(0, catchall) - + return {"data": tags} + @oauth_login_required @json.json_view def api_shared_usernames(request): @@ -294,28 +322,36 @@ def api_shared_usernames(request): blurblogs = [] for social_feed in social_feeds: - if not social_feed['shared_stories_count']: continue - blurblogs.append(dict(label="%s (%s %s)" % (social_feed['username'], - social_feed['shared_stories_count'], - 'story' if social_feed['shared_stories_count'] == 1 else 'stories'), - value="%s" % social_feed['user_id'])) - blurblogs = sorted(blurblogs, key=lambda b: b['label'].lower()) - catchall = dict(label="All Shared Stories", - 
value="all") + if not social_feed["shared_stories_count"]: + continue + blurblogs.append( + dict( + label="%s (%s %s)" + % ( + social_feed["username"], + social_feed["shared_stories_count"], + "story" if social_feed["shared_stories_count"] == 1 else "stories", + ), + value="%s" % social_feed["user_id"], + ) + ) + blurblogs = sorted(blurblogs, key=lambda b: b["label"].lower()) + catchall = dict(label="All Shared Stories", value="all") blurblogs.insert(0, catchall) - + return {"data": blurblogs} + @oauth_login_required @json.json_view def api_unread_story(request, trigger_slug=None): user = request.user body = request.body_json - after = body.get('after', None) - before = body.get('before', None) - limit = body.get('limit', 50) - fields = body.get('triggerFields') - feed_or_folder = fields['feed_or_folder'] + after = body.get("after", None) + before = body.get("before", None) + limit = body.get("limit", 50) + fields = body.get("triggerFields") + feed_or_folder = fields["feed_or_folder"] entries = [] if isinstance(feed_or_folder, int) or feed_or_folder.isdigit(): @@ -326,8 +362,7 @@ def api_unread_story(request, trigger_slug=None): return dict(data=[]) found_feed_ids = [feed_id] found_trained_feed_ids = [feed_id] if usersub.is_trained else [] - stories = usersub.get_stories(order="newest", read_filter="unread", - offset=0, limit=limit) + stories = usersub.get_stories(order="newest", read_filter="unread", offset=0, limit=limit) else: folder_title = feed_or_folder if folder_title == "Top Level": @@ -337,11 +372,10 @@ def api_unread_story(request, trigger_slug=None): feed_ids = None if folder_title != "all": feed_ids = flat_folders.get(folder_title) - usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids, - read_filter="unread") + usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids, read_filter="unread") feed_ids = [sub.feed_id for sub in usersubs] params = { - "user_id": user.pk, + "user_id": user.pk, "feed_ids": feed_ids, "offset": 0, "limit": limit, @@ -351,261 +385,321 @@ def api_unread_story(request, trigger_slug=None): "cutoff_date": user.profile.unread_cutoff, } story_hashes, unread_feed_story_hashes = UserSubscription.feed_stories(**params) - mstories = MStory.objects(story_hash__in=story_hashes).order_by('-story_date') + mstories = MStory.objects(story_hash__in=story_hashes).order_by("-story_date") stories = Feed.format_stories(mstories) - found_feed_ids = list(set([story['story_feed_id'] for story in stories])) + found_feed_ids = list(set([story["story_feed_id"] for story in stories])) trained_feed_ids = [sub.feed_id for sub in usersubs if sub.is_trained] found_trained_feed_ids = list(set(trained_feed_ids) & set(found_feed_ids)) - + if found_trained_feed_ids: - classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, - feed_id__in=found_trained_feed_ids)) - classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk, - feed_id__in=found_trained_feed_ids)) - classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, - feed_id__in=found_trained_feed_ids)) - classifier_tags = list(MClassifierTag.objects(user_id=user.pk, - feed_id__in=found_trained_feed_ids)) - feeds = dict([(f.pk, { - "title": f.feed_title, - "website": f.feed_link, - "address": f.feed_address, - }) for f in Feed.objects.filter(pk__in=found_feed_ids)]) + classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, feed_id__in=found_trained_feed_ids)) + classifier_authors = list( + MClassifierAuthor.objects(user_id=user.pk, feed_id__in=found_trained_feed_ids) + 
) + classifier_titles = list( + MClassifierTitle.objects(user_id=user.pk, feed_id__in=found_trained_feed_ids) + ) + classifier_tags = list(MClassifierTag.objects(user_id=user.pk, feed_id__in=found_trained_feed_ids)) + feeds = dict( + [ + ( + f.pk, + { + "title": f.feed_title, + "website": f.feed_link, + "address": f.feed_address, + }, + ) + for f in Feed.objects.filter(pk__in=found_feed_ids) + ] + ) for story in stories: - if before and int(story['story_date'].strftime("%s")) > before: continue - if after and int(story['story_date'].strftime("%s")) < after: continue + if before and int(story["story_date"].strftime("%s")) > before: + continue + if after and int(story["story_date"].strftime("%s")) < after: + continue score = 0 - if found_trained_feed_ids and story['story_feed_id'] in found_trained_feed_ids: - score = compute_story_score(story, classifier_titles=classifier_titles, - classifier_authors=classifier_authors, - classifier_tags=classifier_tags, - classifier_feeds=classifier_feeds) - if score < 0: continue - if trigger_slug == "new-unread-focus-story" and score < 1: continue - feed = feeds.get(story['story_feed_id'], None) - entries.append({ - "StoryTitle": story['story_title'], - "StoryContent": story['story_content'], - "StoryURL": story['story_permalink'], - "StoryAuthor": story['story_authors'], - "PublishedAt": story['story_date'].strftime("%Y-%m-%dT%H:%M:%SZ"), - "StoryScore": score, - "Site": feed and feed['title'], - "SiteURL": feed and feed['website'], - "SiteRSS": feed and feed['address'], - "meta": { - "id": story['story_hash'], - "timestamp": int(story['story_date'].strftime("%s")) - }, - }) - + if found_trained_feed_ids and story["story_feed_id"] in found_trained_feed_ids: + score = compute_story_score( + story, + classifier_titles=classifier_titles, + classifier_authors=classifier_authors, + classifier_tags=classifier_tags, + classifier_feeds=classifier_feeds, + ) + if score < 0: + continue + if trigger_slug == "new-unread-focus-story" and score < 1: + continue + feed = feeds.get(story["story_feed_id"], None) + entries.append( + { + "StoryTitle": story["story_title"], + "StoryContent": story["story_content"], + "StoryURL": story["story_permalink"], + "StoryAuthor": story["story_authors"], + "PublishedAt": story["story_date"].strftime("%Y-%m-%dT%H:%M:%SZ"), + "StoryScore": score, + "Site": feed and feed["title"], + "SiteURL": feed and feed["website"], + "SiteRSS": feed and feed["address"], + "meta": {"id": story["story_hash"], "timestamp": int(story["story_date"].strftime("%s"))}, + } + ) + if after: - entries = sorted(entries, key=lambda s: s['meta']['timestamp']) - - logging.user(request, "~FYChecking unread%s stories with ~SB~FCIFTTT~SN~FY: ~SB%s~SN - ~SB%s~SN stories" % (" ~SBfocus~SN" if trigger_slug == "new-unread-focus-story" else "", feed_or_folder, len(entries))) - + entries = sorted(entries, key=lambda s: s["meta"]["timestamp"]) + + logging.user( + request, + "~FYChecking unread%s stories with ~SB~FCIFTTT~SN~FY: ~SB%s~SN - ~SB%s~SN stories" + % (" ~SBfocus~SN" if trigger_slug == "new-unread-focus-story" else "", feed_or_folder, len(entries)), + ) + return {"data": entries[:limit]} + @oauth_login_required @json.json_view def api_saved_story(request): user = request.user body = request.body_json - after = body.get('after', None) - before = body.get('before', None) - limit = body.get('limit', 50) - fields = body.get('triggerFields') - story_tag = fields['story_tag'] + after = body.get("after", None) + before = body.get("before", None) + limit = body.get("limit", 
50) + fields = body.get("triggerFields") + story_tag = fields["story_tag"] entries = [] - + if story_tag == "all": story_tag = "" - + params = dict(user_id=user.pk) if story_tag: params.update(dict(user_tags__contains=story_tag)) - mstories = MStarredStory.objects(**params).order_by('-starred_date')[:limit] - stories = Feed.format_stories(mstories) - - found_feed_ids = list(set([story['story_feed_id'] for story in stories])) - feeds = dict([(f.pk, { - "title": f.feed_title, - "website": f.feed_link, - "address": f.feed_address, - }) for f in Feed.objects.filter(pk__in=found_feed_ids)]) + mstories = MStarredStory.objects(**params).order_by("-starred_date")[:limit] + stories = Feed.format_stories(mstories) + + found_feed_ids = list(set([story["story_feed_id"] for story in stories])) + feeds = dict( + [ + ( + f.pk, + { + "title": f.feed_title, + "website": f.feed_link, + "address": f.feed_address, + }, + ) + for f in Feed.objects.filter(pk__in=found_feed_ids) + ] + ) for story in stories: - if before and int(story['story_date'].strftime("%s")) > before: continue - if after and int(story['story_date'].strftime("%s")) < after: continue - feed = feeds.get(story['story_feed_id'], None) - entries.append({ - "StoryTitle": story['story_title'], - "StoryContent": story['story_content'], - "StoryURL": story['story_permalink'], - "StoryAuthor": story['story_authors'], - "PublishedAt": story['story_date'].strftime("%Y-%m-%dT%H:%M:%SZ"), - "SavedAt": story['starred_date'].strftime("%Y-%m-%dT%H:%M:%SZ"), - "Tags": ', '.join(story['user_tags']), - "Site": feed and feed['title'], - "SiteURL": feed and feed['website'], - "SiteRSS": feed and feed['address'], - "meta": { - "id": story['story_hash'], - "timestamp": int(story['starred_date'].strftime("%s")) - }, - }) + if before and int(story["story_date"].strftime("%s")) > before: + continue + if after and int(story["story_date"].strftime("%s")) < after: + continue + feed = feeds.get(story["story_feed_id"], None) + entries.append( + { + "StoryTitle": story["story_title"], + "StoryContent": story["story_content"], + "StoryURL": story["story_permalink"], + "StoryAuthor": story["story_authors"], + "PublishedAt": story["story_date"].strftime("%Y-%m-%dT%H:%M:%SZ"), + "SavedAt": story["starred_date"].strftime("%Y-%m-%dT%H:%M:%SZ"), + "Tags": ", ".join(story["user_tags"]), + "Site": feed and feed["title"], + "SiteURL": feed and feed["website"], + "SiteRSS": feed and feed["address"], + "meta": {"id": story["story_hash"], "timestamp": int(story["starred_date"].strftime("%s"))}, + } + ) if after: - entries = sorted(entries, key=lambda s: s['meta']['timestamp']) - - logging.user(request, "~FCChecking saved stories from ~SBIFTTT~SB: ~SB%s~SN - ~SB%s~SN stories" % (story_tag if story_tag else "[All stories]", len(entries))) - + entries = sorted(entries, key=lambda s: s["meta"]["timestamp"]) + + logging.user( + request, + "~FCChecking saved stories from ~SBIFTTT~SB: ~SB%s~SN - ~SB%s~SN stories" + % (story_tag if story_tag else "[All stories]", len(entries)), + ) + return {"data": entries} - + + @oauth_login_required @json.json_view def api_shared_story(request): user = request.user body = request.body_json - after = body.get('after', None) - before = body.get('before', None) - limit = body.get('limit', 50) - fields = body.get('triggerFields') - blurblog_user = fields['blurblog_user'] + after = body.get("after", None) + before = body.get("before", None) + limit = body.get("limit", 50) + fields = body.get("triggerFields") + blurblog_user = fields["blurblog_user"] entries = [] 
- + if isinstance(blurblog_user, int) or blurblog_user.isdigit(): social_user_ids = [int(blurblog_user)] elif blurblog_user == "all": socialsubs = MSocialSubscription.objects.filter(user_id=user.pk) social_user_ids = [ss.subscription_user_id for ss in socialsubs] - mstories = MSharedStory.objects( - user_id__in=social_user_ids - ).order_by('-shared_date')[:limit] + mstories = MSharedStory.objects(user_id__in=social_user_ids).order_by("-shared_date")[:limit] stories = Feed.format_stories(mstories) - - found_feed_ids = list(set([story['story_feed_id'] for story in stories])) - share_user_ids = list(set([story['user_id'] for story in stories])) - users = dict([(u.pk, u.username) - for u in User.objects.filter(pk__in=share_user_ids).only('pk', 'username')]) - feeds = dict([(f.pk, { - "title": f.feed_title, - "website": f.feed_link, - "address": f.feed_address, - }) for f in Feed.objects.filter(pk__in=found_feed_ids)]) - - classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, - social_user_id__in=social_user_ids)) - classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk, - social_user_id__in=social_user_ids)) - classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, - social_user_id__in=social_user_ids)) - classifier_tags = list(MClassifierTag.objects(user_id=user.pk, - social_user_id__in=social_user_ids)) + + found_feed_ids = list(set([story["story_feed_id"] for story in stories])) + share_user_ids = list(set([story["user_id"] for story in stories])) + users = dict( + [(u.pk, u.username) for u in User.objects.filter(pk__in=share_user_ids).only("pk", "username")] + ) + feeds = dict( + [ + ( + f.pk, + { + "title": f.feed_title, + "website": f.feed_link, + "address": f.feed_address, + }, + ) + for f in Feed.objects.filter(pk__in=found_feed_ids) + ] + ) + + classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, social_user_id__in=social_user_ids)) + classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk, social_user_id__in=social_user_ids)) + classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, social_user_id__in=social_user_ids)) + classifier_tags = list(MClassifierTag.objects(user_id=user.pk, social_user_id__in=social_user_ids)) # Merge with feed specific classifiers - classifier_feeds = classifier_feeds + list(MClassifierFeed.objects(user_id=user.pk, - feed_id__in=found_feed_ids)) - classifier_authors = classifier_authors + list(MClassifierAuthor.objects(user_id=user.pk, - feed_id__in=found_feed_ids)) - classifier_titles = classifier_titles + list(MClassifierTitle.objects(user_id=user.pk, - feed_id__in=found_feed_ids)) - classifier_tags = classifier_tags + list(MClassifierTag.objects(user_id=user.pk, - feed_id__in=found_feed_ids)) - + classifier_feeds = classifier_feeds + list( + MClassifierFeed.objects(user_id=user.pk, feed_id__in=found_feed_ids) + ) + classifier_authors = classifier_authors + list( + MClassifierAuthor.objects(user_id=user.pk, feed_id__in=found_feed_ids) + ) + classifier_titles = classifier_titles + list( + MClassifierTitle.objects(user_id=user.pk, feed_id__in=found_feed_ids) + ) + classifier_tags = classifier_tags + list( + MClassifierTag.objects(user_id=user.pk, feed_id__in=found_feed_ids) + ) + for story in stories: - if before and int(story['shared_date'].strftime("%s")) > before: continue - if after and int(story['shared_date'].strftime("%s")) < after: continue - score = compute_story_score(story, classifier_titles=classifier_titles, - classifier_authors=classifier_authors, - 
classifier_tags=classifier_tags, - classifier_feeds=classifier_feeds) - if score < 0: continue - feed = feeds.get(story['story_feed_id'], None) - entries.append({ - "StoryTitle": story['story_title'], - "StoryContent": story['story_content'], - "StoryURL": story['story_permalink'], - "StoryAuthor": story['story_authors'], - "PublishedAt": story['story_date'].strftime("%Y-%m-%dT%H:%M:%SZ"), - "StoryScore": score, - "Comments": story['comments'], - "Username": users.get(story['user_id']), - "SharedAt": story['shared_date'].strftime("%Y-%m-%dT%H:%M:%SZ"), - "Site": feed and feed['title'], - "SiteURL": feed and feed['website'], - "SiteRSS": feed and feed['address'], - "meta": { - "id": story['story_hash'], - "timestamp": int(story['shared_date'].strftime("%s")) - }, - }) + if before and int(story["shared_date"].strftime("%s")) > before: + continue + if after and int(story["shared_date"].strftime("%s")) < after: + continue + score = compute_story_score( + story, + classifier_titles=classifier_titles, + classifier_authors=classifier_authors, + classifier_tags=classifier_tags, + classifier_feeds=classifier_feeds, + ) + if score < 0: + continue + feed = feeds.get(story["story_feed_id"], None) + entries.append( + { + "StoryTitle": story["story_title"], + "StoryContent": story["story_content"], + "StoryURL": story["story_permalink"], + "StoryAuthor": story["story_authors"], + "PublishedAt": story["story_date"].strftime("%Y-%m-%dT%H:%M:%SZ"), + "StoryScore": score, + "Comments": story["comments"], + "Username": users.get(story["user_id"]), + "SharedAt": story["shared_date"].strftime("%Y-%m-%dT%H:%M:%SZ"), + "Site": feed and feed["title"], + "SiteURL": feed and feed["website"], + "SiteRSS": feed and feed["address"], + "meta": {"id": story["story_hash"], "timestamp": int(story["shared_date"].strftime("%s"))}, + } + ) if after: - entries = sorted(entries, key=lambda s: s['meta']['timestamp']) - - logging.user(request, "~FMChecking shared stories from ~SB~FCIFTTT~SN~FM: ~SB~FM%s~FM~SN - ~SB%s~SN stories" % (blurblog_user, len(entries))) + entries = sorted(entries, key=lambda s: s["meta"]["timestamp"]) + + logging.user( + request, + "~FMChecking shared stories from ~SB~FCIFTTT~SN~FM: ~SB~FM%s~FM~SN - ~SB%s~SN stories" + % (blurblog_user, len(entries)), + ) return {"data": entries} + @json.json_view def ifttt_status(request): logging.user(request, "~FCChecking ~SBIFTTT~SN status") - return {"data": { - "status": "OK", - "time": datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"), - }} + return { + "data": { + "status": "OK", + "time": datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"), + } + } + @oauth_login_required @json.json_view def api_share_new_story(request): user = request.user body = request.body_json - fields = body.get('actionFields') - story_url = urlnorm.normalize(fields['story_url']) - story_content = fields.get('story_content', "") - story_title = fields.get('story_title', "") - story_author = fields.get('story_author', "") - comments = fields.get('comments', None) - + fields = body.get("actionFields") + story_url = urlnorm.normalize(fields["story_url"]) + story_content = fields.get("story_content", "") + story_title = fields.get("story_title", "") + story_author = fields.get("story_author", "") + comments = fields.get("comments", None) + logging.user(request.user, "~FBFinding feed (api_share_new_story): %s" % story_url) original_feed = Feed.get_feed_from_url(story_url, create=True, fetch=True) story_hash = MStory.guid_hash_unsaved(story_url) - feed_id = (original_feed and 
original_feed.pk or 0) + feed_id = original_feed and original_feed.pk or 0 if not user.profile.is_premium and MSharedStory.feed_quota(user.pk, story_hash, feed_id=feed_id): - return {"errors": [{ - 'message': 'Only premium users can share multiple stories per day from the same site.' - }]} - + return { + "errors": [ + {"message": "Only premium users can share multiple stories per day from the same site."} + ] + } + quota = 3 if MSharedStory.feed_quota(user.pk, story_hash, quota=quota): - logging.user(request, "~BM~FRNOT ~FYSharing story from ~SB~FCIFTTT~FY, over quota: ~SB%s: %s" % (story_url, comments)) - return {"errors": [{ - 'message': 'You can only share %s stories per day.' % quota - }]} - + logging.user( + request, + "~BM~FRNOT ~FYSharing story from ~SB~FCIFTTT~FY, over quota: ~SB%s: %s" % (story_url, comments), + ) + return {"errors": [{"message": "You can only share %s stories per day." % quota}]} + if not story_content or not story_title: ti = TextImporter(feed=original_feed, story_url=story_url, request=request) original_story = ti.fetch(return_document=True) if original_story: - story_url = original_story['url'] + story_url = original_story["url"] if not story_content: - story_content = original_story['content'] + story_content = original_story["content"] if not story_title: - story_title = original_story['title'] - + story_title = original_story["title"] + if story_content: story_content = lxml.html.fromstring(story_content) story_content.make_links_absolute(story_url) story_content = lxml.html.tostring(story_content) - - shared_story = MSharedStory.objects.filter(user_id=user.pk, - story_feed_id=original_feed and original_feed.pk or 0, - story_guid=story_url).limit(1).first() + + shared_story = ( + MSharedStory.objects.filter( + user_id=user.pk, story_feed_id=original_feed and original_feed.pk or 0, story_guid=story_url + ) + .limit(1) + .first() + ) if not shared_story: - title_max = MSharedStory._fields['story_title'].max_length + title_max = MSharedStory._fields["story_title"].max_length story_db = { "story_guid": story_url, "story_permalink": story_url, @@ -624,107 +718,121 @@ def api_share_new_story(request): for socialsub in socialsubs: socialsub.needs_unread_recalc = True socialsub.save() - logging.user(request, "~BM~FYSharing story from ~SB~FCIFTTT~FY: ~SB%s: %s" % (story_url, comments)) + logging.user( + request, "~BM~FYSharing story from ~SB~FCIFTTT~FY: ~SB%s: %s" % (story_url, comments) + ) except NotUniqueError: - logging.user(request, "~BM~FY~SBAlready~SN shared story from ~SB~FCIFTTT~FY: ~SB%s: %s" % (story_url, comments)) + logging.user( + request, + "~BM~FY~SBAlready~SN shared story from ~SB~FCIFTTT~FY: ~SB%s: %s" % (story_url, comments), + ) else: - logging.user(request, "~BM~FY~SBAlready~SN shared story from ~SB~FCIFTTT~FY: ~SB%s: %s" % (story_url, comments)) - + logging.user( + request, "~BM~FY~SBAlready~SN shared story from ~SB~FCIFTTT~FY: ~SB%s: %s" % (story_url, comments) + ) + try: - socialsub = MSocialSubscription.objects.get(user_id=user.pk, - subscription_user_id=user.pk) + socialsub = MSocialSubscription.objects.get(user_id=user.pk, subscription_user_id=user.pk) except MSocialSubscription.DoesNotExist: socialsub = None - + if socialsub and shared_story: - socialsub.mark_story_ids_as_read([shared_story.story_hash], - shared_story.story_feed_id, - request=request) + socialsub.mark_story_ids_as_read( + [shared_story.story_hash], shared_story.story_feed_id, request=request + ) elif shared_story: RUserStory.mark_read(user.pk, shared_story.story_feed_id, 
shared_story.story_hash) - + if shared_story: shared_story.publish_update_to_subscribers() - - return {"data": [{ - "id": shared_story and shared_story.story_guid, - "url": shared_story and shared_story.blurblog_permalink() - }]} + + return { + "data": [ + { + "id": shared_story and shared_story.story_guid, + "url": shared_story and shared_story.blurblog_permalink(), + } + ] + } + @oauth_login_required @json.json_view def api_save_new_story(request): user = request.user body = request.body_json - fields = body.get('actionFields') - story_url = urlnorm.normalize(fields['story_url']) - story_content = fields.get('story_content', "") - story_title = fields.get('story_title', "") - story_author = fields.get('story_author', "") - user_tags = fields.get('user_tags', "") + fields = body.get("actionFields") + story_url = urlnorm.normalize(fields["story_url"]) + story_content = fields.get("story_content", "") + story_title = fields.get("story_title", "") + story_author = fields.get("story_author", "") + user_tags = fields.get("user_tags", "") story = None - + logging.user(request.user, "~FBFinding feed (api_save_new_story): %s" % story_url) original_feed = Feed.get_feed_from_url(story_url) if not story_content or not story_title: ti = TextImporter(feed=original_feed, story_url=story_url, request=request) original_story = ti.fetch(return_document=True) if original_story: - story_url = original_story['url'] + story_url = original_story["url"] if not story_content: - story_content = original_story['content'] + story_content = original_story["content"] if not story_title: - story_title = original_story['title'] + story_title = original_story["title"] try: story_db = { "user_id": user.pk, "starred_date": datetime.datetime.now(), "story_date": datetime.datetime.now(), - "story_title": story_title or '[Untitled]', + "story_title": story_title or "[Untitled]", "story_permalink": story_url, "story_guid": story_url, "story_content": story_content, "story_author_name": story_author, "story_feed_id": original_feed and original_feed.pk or 0, - "user_tags": [tag for tag in user_tags.split(',')] + "user_tags": [tag for tag in user_tags.split(",")], } story = MStarredStory.objects.create(**story_db) - logging.user(request, "~FCStarring by ~SBIFTTT~SN: ~SB%s~SN in ~SB%s" % (story_db['story_title'][:50], original_feed and original_feed)) + logging.user( + request, + "~FCStarring by ~SBIFTTT~SN: ~SB%s~SN in ~SB%s" + % (story_db["story_title"][:50], original_feed and original_feed), + ) MStarredStoryCounts.count_for_user(user.pk) except OperationError: - logging.user(request, "~FCAlready starred by ~SBIFTTT~SN: ~SB%s" % (story_db['story_title'][:50])) + logging.user(request, "~FCAlready starred by ~SBIFTTT~SN: ~SB%s" % (story_db["story_title"][:50])) pass - - return {"data": [{ - "id": story and story.id, - "url": story and story.story_permalink - }]} + + return {"data": [{"id": story and story.id, "url": story and story.story_permalink}]} + @oauth_login_required @json.json_view def api_save_new_subscription(request): user = request.user body = request.body_json - fields = body.get('actionFields') - url = urlnorm.normalize(fields['url']) - folder = fields['folder'] - + fields = body.get("actionFields") + url = urlnorm.normalize(fields["url"]) + folder = fields["folder"] + if folder == "Top Level": folder = " " - + code, message, us = UserSubscription.add_subscription( - user=user, - feed_address=url, - folder=folder, - bookmarklet=True + user=user, feed_address=url, folder=folder, bookmarklet=True ) - + 
logging.user(request, "~FRAdding URL from ~FC~SBIFTTT~SN~FR: ~SB%s (in %s)" % (url, folder)) if us and us.feed: url = us.feed.feed_address - return {"data": [{ - "id": us and us.feed_id, - "url": url, - }]} + return { + "data": [ + { + "id": us and us.feed_id, + "url": url, + } + ] + } diff --git a/apps/profile/factories.py b/apps/profile/factories.py index b5b57d51bf..3c20c25200 100644 --- a/apps/profile/factories.py +++ b/apps/profile/factories.py @@ -3,18 +3,19 @@ from django.contrib.auth.models import User from apps.profile.models import Profile + class UserFactory(DjangoModelFactory): - first_name = factory.Faker('first_name') - last_name = factory.Faker('last_name') - username = factory.Faker('email') - date_joined = factory.Faker('date_time') + first_name = factory.Faker("first_name") + last_name = factory.Faker("last_name") + username = factory.Faker("email") + date_joined = factory.Faker("date_time") class Meta: model = User - class ProfileFactory(DjangoModelFactory): user = factory.SubFactory(UserFactory) + class Meta: model = Profile diff --git a/apps/profile/forms.py b/apps/profile/forms.py index b47a06a3da..f5adc12a38 100644 --- a/apps/profile/forms.py +++ b/apps/profile/forms.py @@ -14,135 +14,133 @@ ("newsblur-premium-pro", mark_safe("$299 / year (~$25/month)")), ] + class HorizRadioRenderer(forms.RadioSelect): - """ this overrides widget method to put radio buttons horizontally - instead of vertically. + """this overrides widget method to put radio buttons horizontally + instead of vertically. """ + def render(self, name, value, attrs=None, renderer=None): - """Outputs radios""" - choices = '\n'.join(['%s\n' % w for w in self]) - return mark_safe('
<div class="horiz_radio">%s</div>' % choices)
+        """Outputs radios"""
+        choices = "\n".join(["%s\n" % w for w in self])
+        return mark_safe('<div class="horiz_radio">%s</div>
' % choices) + class StripePlusPaymentForm(StripePaymentForm): def __init__(self, *args, **kwargs): - email = kwargs.pop('email') - plan = kwargs.pop('plan', '') + email = kwargs.pop("email") + plan = kwargs.pop("plan", "") super(StripePlusPaymentForm, self).__init__(*args, **kwargs) - self.fields['email'].initial = email + self.fields["email"].initial = email if plan: - self.fields['plan'].initial = plan + self.fields["plan"].initial = plan - email = forms.EmailField(widget=forms.TextInput(attrs=dict(maxlength=75)), - label='Email address', - required=False) - plan = forms.ChoiceField(required=False, widget=forms.RadioSelect, - choices=PLANS, label='Plan') + email = forms.EmailField( + widget=forms.TextInput(attrs=dict(maxlength=75)), label="Email address", required=False + ) + plan = forms.ChoiceField(required=False, widget=forms.RadioSelect, choices=PLANS, label="Plan") class DeleteAccountForm(forms.Form): - password = forms.CharField(widget=forms.PasswordInput(), - label="Confirm your password", - required=False) - confirm = forms.CharField(label="Type \"Delete\" to confirm", - widget=forms.TextInput(), - required=False) + password = forms.CharField(widget=forms.PasswordInput(), label="Confirm your password", required=False) + confirm = forms.CharField(label='Type "Delete" to confirm', widget=forms.TextInput(), required=False) def __init__(self, *args, **kwargs): - self.user = kwargs.pop('user') + self.user = kwargs.pop("user") super(DeleteAccountForm, self).__init__(*args, **kwargs) - + def clean_password(self): - user_auth = authenticate(username=self.user.username, - password=self.cleaned_data['password']) + user_auth = authenticate(username=self.user.username, password=self.cleaned_data["password"]) if not user_auth: user_auth = blank_authenticate(username=self.user.username) - + if not user_auth: - raise forms.ValidationError('Your password doesn\'t match.') + raise forms.ValidationError("Your password doesn't match.") - return self.cleaned_data['password'] + return self.cleaned_data["password"] def clean_confirm(self): - if self.cleaned_data.get('confirm', "").lower() != "delete": + if self.cleaned_data.get("confirm", "").lower() != "delete": raise forms.ValidationError('Please type "DELETE" to confirm deletion.') - return self.cleaned_data['confirm'] + return self.cleaned_data["confirm"] + class ForgotPasswordForm(forms.Form): - email = forms.CharField(widget=forms.TextInput(), - label="Your email address", - required=False) + email = forms.CharField(widget=forms.TextInput(), label="Your email address", required=False) def __init__(self, *args, **kwargs): super(ForgotPasswordForm, self).__init__(*args, **kwargs) - + def clean_email(self): - if not self.cleaned_data['email']: - raise forms.ValidationError('Please enter in an email address.') + if not self.cleaned_data["email"]: + raise forms.ValidationError("Please enter in an email address.") try: - User.objects.get(email__iexact=self.cleaned_data['email']) + User.objects.get(email__iexact=self.cleaned_data["email"]) except User.MultipleObjectsReturned: pass except User.DoesNotExist: - raise forms.ValidationError('No user has that email address.') + raise forms.ValidationError("No user has that email address.") + + return self.cleaned_data["email"] - return self.cleaned_data['email'] class ForgotPasswordReturnForm(forms.Form): - password = forms.CharField(widget=forms.PasswordInput(), - label="Your new password", - required=False) + password = forms.CharField(widget=forms.PasswordInput(), label="Your new password", 
required=False) + class AccountSettingsForm(forms.Form): use_required_attribute = False - username = forms.RegexField(regex=r'^\w+$', - max_length=30, - widget=forms.TextInput(attrs={'class': 'NB-input'}), - label='username', - required=False, - error_messages={ - 'invalid': "Your username may only contain letters and numbers." - }) - email = forms.EmailField(widget=forms.TextInput(attrs={'maxlength': 75, 'class': 'NB-input'}), - label='email address', - required=True, - error_messages={'required': 'Please enter an email.'}) - new_password = forms.CharField(widget=forms.PasswordInput(attrs={'class': 'NB-input'}), - label='password', - required=False) - # error_messages={'required': 'Please enter a password.'}) - old_password = forms.CharField(widget=forms.PasswordInput(attrs={'class': 'NB-input'}), - label='password', - required=False) - custom_js = forms.CharField(widget=forms.TextInput(attrs={'class': 'NB-input'}), - label='custom_js', - required=False) - custom_css = forms.CharField(widget=forms.TextInput(attrs={'class': 'NB-input'}), - label='custom_css', - required=False) - + username = forms.RegexField( + regex=r"^\w+$", + max_length=30, + widget=forms.TextInput(attrs={"class": "NB-input"}), + label="username", + required=False, + error_messages={"invalid": "Your username may only contain letters and numbers."}, + ) + email = forms.EmailField( + widget=forms.TextInput(attrs={"maxlength": 75, "class": "NB-input"}), + label="email address", + required=True, + error_messages={"required": "Please enter an email."}, + ) + new_password = forms.CharField( + widget=forms.PasswordInput(attrs={"class": "NB-input"}), label="password", required=False + ) + # error_messages={'required': 'Please enter a password.'}) + old_password = forms.CharField( + widget=forms.PasswordInput(attrs={"class": "NB-input"}), label="password", required=False + ) + custom_js = forms.CharField( + widget=forms.TextInput(attrs={"class": "NB-input"}), label="custom_js", required=False + ) + custom_css = forms.CharField( + widget=forms.TextInput(attrs={"class": "NB-input"}), label="custom_css", required=False + ) + def __init__(self, user, *args, **kwargs): self.user = user super(AccountSettingsForm, self).__init__(*args, **kwargs) - + def clean_username(self): - username = self.cleaned_data['username'] + username = self.cleaned_data["username"] return username def clean_password(self): - if not self.cleaned_data['password']: + if not self.cleaned_data["password"]: return "" - return self.cleaned_data['password'] - + return self.cleaned_data["password"] + def clean_email(self): - return self.cleaned_data['email'] - + return self.cleaned_data["email"] + def clean(self): - username = self.cleaned_data.get('username', '') - new_password = self.cleaned_data.get('new_password', '') - old_password = self.cleaned_data.get('old_password', '') - email = self.cleaned_data.get('email', None) - + username = self.cleaned_data.get("username", "") + new_password = self.cleaned_data.get("new_password", "") + old_password = self.cleaned_data.get("old_password", "") + email = self.cleaned_data.get("email", None) + if username and self.user.username != username: try: User.objects.get(username__iexact=username) @@ -150,26 +148,28 @@ def clean(self): pass else: raise forms.ValidationError("This username is already taken. Try something different.") - + if self.user.email != email: if email and User.objects.filter(email__iexact=email).count(): - raise forms.ValidationError("This email is already being used by another account. 
Try something different.") - + raise forms.ValidationError( + "This email is already being used by another account. Try something different." + ) + if old_password or new_password: code = change_password(self.user, old_password, new_password, only_check=True) if code <= 0: - raise forms.ValidationError("Your old password is incorrect.") + raise forms.ValidationError("Your old password is incorrect.") return self.cleaned_data - + def save(self, profile_callback=None): - username = self.cleaned_data['username'] - new_password = self.cleaned_data.get('new_password', None) - old_password = self.cleaned_data.get('old_password', None) - email = self.cleaned_data.get('email', None) - custom_css = self.cleaned_data.get('custom_css', None) - custom_js = self.cleaned_data.get('custom_js', None) - + username = self.cleaned_data["username"] + new_password = self.cleaned_data.get("new_password", None) + old_password = self.cleaned_data.get("old_password", None) + email = self.cleaned_data.get("email", None) + custom_css = self.cleaned_data.get("custom_css", None) + custom_js = self.cleaned_data.get("custom_js", None) + if username and self.user.username != username: change_password(self.user, self.user.username, username) self.user.username = username @@ -178,28 +178,26 @@ def save(self, profile_callback=None): social_profile.username = username social_profile.save() - self.user.profile.update_email(email) - + if old_password or new_password: change_password(self.user, old_password, new_password) - + MCustomStyling.save_user(self.user.pk, custom_css, custom_js) - + + class RedeemCodeForm(forms.Form): use_required_attribute = False - gift_code = forms.CharField(widget=forms.TextInput(), - label="Gift code", - required=True) - + gift_code = forms.CharField(widget=forms.TextInput(), label="Gift code", required=True) + def clean_gift_code(self): - gift_code = self.cleaned_data['gift_code'] - - gift_code = re.sub(r'[^a-zA-Z0-9]', '', gift_code).lower() + gift_code = self.cleaned_data["gift_code"] + + gift_code = re.sub(r"[^a-zA-Z0-9]", "", gift_code).lower() if len(gift_code) != 12: - raise forms.ValidationError('Your gift code should be 12 characters long.') - + raise forms.ValidationError("Your gift code should be 12 characters long.") + newsblur_gift_code = MGiftCode.objects.filter(gift_code__iexact=gift_code) if newsblur_gift_code: @@ -208,15 +206,17 @@ def clean_gift_code(self): return newsblur_gift_code.gift_code else: # Thinkup / Good Web Bundle - req = requests.get('https://www.thinkup.com/join/api/bundle/', params={'code': gift_code}) + req = requests.get("https://www.thinkup.com/join/api/bundle/", params={"code": gift_code}) response = req.json() - - is_valid = response.get('is_valid', None) + + is_valid = response.get("is_valid", None) if is_valid: return gift_code elif is_valid == False: - raise forms.ValidationError('Your gift code is invalid. Check it for errors.') - elif response.get('error', None): - raise forms.ValidationError('Your gift code is invalid, says the server: %s' % response['error']) - + raise forms.ValidationError("Your gift code is invalid. 
Check it for errors.") + elif response.get("error", None): + raise forms.ValidationError( + "Your gift code is invalid, says the server: %s" % response["error"] + ) + return gift_code diff --git a/apps/profile/management/commands/check_db.py b/apps/profile/management/commands/check_db.py index 941f5b57e5..c8aa983a25 100644 --- a/apps/profile/management/commands/check_db.py +++ b/apps/profile/management/commands/check_db.py @@ -3,10 +3,10 @@ from django.db import connections from django.db.utils import OperationalError -class Command(BaseCommand): +class Command(BaseCommand): def handle(self, *args, **options): - db_conn = connections['default'] + db_conn = connections["default"] connected = False while not connected: try: diff --git a/apps/profile/management/commands/fp.py b/apps/profile/management/commands/fp.py index 21b055f564..1f7ed64f8a 100644 --- a/apps/profile/management/commands/fp.py +++ b/apps/profile/management/commands/fp.py @@ -1,15 +1,15 @@ from django.core.management.base import BaseCommand from django.contrib.auth.models import User -class Command(BaseCommand): +class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("-u", "--username", dest="username", nargs=1, help="Specify user id or username") parser.add_argument("-e", "--email", dest="email", nargs=1, help="Specify email if it doesn't exist") def handle(self, *args, **options): - username = options.get('username') - email = options.get('email') + username = options.get("username") + email = options.get("email") user = None if username: try: @@ -30,11 +30,9 @@ def handle(self, *args, **options): user = users[0] except User.DoesNotExist: print(" ---> No email found at: %s" % email) - + if user: email = options.get("email") or user.email user.profile.send_forgot_password_email(email) else: print(" ---> No user/email found at: %s/%s" % (username, email)) - - \ No newline at end of file diff --git a/apps/profile/management/commands/reimport_paypal_history.py b/apps/profile/management/commands/reimport_paypal_history.py index 10c4afd6bd..4b6e943b4d 100644 --- a/apps/profile/management/commands/reimport_paypal_history.py +++ b/apps/profile/management/commands/reimport_paypal_history.py @@ -7,25 +7,46 @@ from utils import log as logging from apps.profile.models import Profile, PaymentHistory -class Command(BaseCommand): +class Command(BaseCommand): def add_arguments(self, parser): - parser.add_argument("-d", "--days", dest="days", nargs=1, type=int, default=365, help="Number of days to go back") - parser.add_argument("-o", "--offset", dest="offset", nargs=1, type=int, default=0, help="Offset customer (in date DESC)") - parser.add_argument("-f", "--force", dest="force", nargs=1, type=bool, default=False, help="Force reimport for every user") - + parser.add_argument( + "-d", "--days", dest="days", nargs=1, type=int, default=365, help="Number of days to go back" + ) + parser.add_argument( + "-o", + "--offset", + dest="offset", + nargs=1, + type=int, + default=0, + help="Offset customer (in date DESC)", + ) + parser.add_argument( + "-f", + "--force", + dest="force", + nargs=1, + type=bool, + default=False, + help="Force reimport for every user", + ) + def handle(self, *args, **options): stripe.api_key = settings.STRIPE_SECRET - week = datetime.datetime.now() - datetime.timedelta(days=int(options.get('days'))) + week = datetime.datetime.now() - datetime.timedelta(days=int(options.get("days"))) failed = [] limit = 100 - offset = options.get('offset') - + offset = options.get("offset") + while True: 
logging.debug(" ---> At %s" % offset) - user_ids = PaymentHistory.objects.filter(payment_provider='paypal', - payment_date__gte=week).values('user_id').distinct()[offset:offset+limit] - user_ids = [u['user_id'] for u in user_ids] + user_ids = ( + PaymentHistory.objects.filter(payment_provider="paypal", payment_date__gte=week) + .values("user_id") + .distinct()[offset : offset + limit] + ) + user_ids = [u["user_id"] for u in user_ids] if not len(user_ids): logging.debug("At %s, finished" % offset) break @@ -36,7 +57,7 @@ def handle(self, *args, **options): except User.DoesNotExist: logging.debug(" ***> Couldn't find paypal user_id=%s" % user_id) failed.append(user_id) - + if not user.profile.is_premium: user.profile.activate_premium() elif user.payments.all().count() != 1: @@ -45,10 +66,9 @@ def handle(self, *args, **options): user.profile.setup_premium_history() elif user.profile.premium_expire > datetime.datetime.now() + datetime.timedelta(days=365): user.profile.setup_premium_history() - elif options.get('force'): + elif options.get("force"): user.profile.setup_premium_history() else: logging.debug(" ---> %s is fine" % user.username) return failed - diff --git a/apps/profile/management/commands/reimport_stripe_history.py b/apps/profile/management/commands/reimport_stripe_history.py index fe00e10bdb..6f8e803e43 100644 --- a/apps/profile/management/commands/reimport_stripe_history.py +++ b/apps/profile/management/commands/reimport_stripe_history.py @@ -6,16 +6,29 @@ from utils import log as logging from apps.profile.models import Profile + class Command(BaseCommand): - def add_arguments(self, parser) - parser.add_argument("-d", "--days", dest="days", nargs=1, type='int', default=365, help="Number of days to go back") - parser.add_argument("-l", "--limit", dest="limit", nargs=1, type='int', default=100, help="Charges per batch") - parser.add_argument("-s", "--start", dest="start", nargs=1, type='string', default=None, help="Offset customer_id (starting_after)") + def add_arguments(self, parser): + parser.add_argument( + "-d", "--days", dest="days", nargs=1, type="int", default=365, help="Number of days to go back" + ) + parser.add_argument( + "-l", "--limit", dest="limit", nargs=1, type="int", default=100, help="Charges per batch" + ) + parser.add_argument( + "-s", + "--start", + dest="start", + nargs=1, + type="string", + default=None, + help="Offset customer_id (starting_after)", + ) def handle(self, *args, **options): - limit = options.get('limit') - days = int(options.get('days')) - starting_after = options.get('start') - - Profile.reimport_stripe_history(limit, days, starting_after) \ No newline at end of file + limit = options.get("limit") + days = int(options.get("days")) + starting_after = options.get("start") + + Profile.reimport_stripe_history(limit, days, starting_after) diff --git a/apps/profile/management/commands/remove_last_user.py b/apps/profile/management/commands/remove_last_user.py index 3e6a07883f..f61b9b188e 100644 --- a/apps/profile/management/commands/remove_last_user.py +++ b/apps/profile/management/commands/remove_last_user.py @@ -5,11 +5,12 @@ from django.core.management.base import BaseCommand from apps.profile.models import Profile -class Command(BaseCommand): + +class Command(BaseCommand): def handle(self, *args, **options): user = User.objects.last() - profile = Profile.objects.get(user=user) + profile = Profile.objects.get(user=user) profile.delete() user.delete() - print("User and profile for user {0} deleted".format(user)) \ No newline at end of file + 
print("User and profile for user {0} deleted".format(user)) diff --git a/apps/profile/middleware.py b/apps/profile/middleware.py index 4a0d23e16a..9710f466c2 100644 --- a/apps/profile/middleware.py +++ b/apps/profile/middleware.py @@ -19,16 +19,16 @@ def __init__(self, get_response=None): def process_response(self, request, response): if ( ( - request.path == '/' - or request.path.startswith('/reader/refresh_feeds') - or request.path.startswith('/reader/load_feeds') - or request.path.startswith('/reader/feeds') + request.path == "/" + or request.path.startswith("/reader/refresh_feeds") + or request.path.startswith("/reader/load_feeds") + or request.path.startswith("/reader/feeds") ) - and hasattr(request, 'user') + and hasattr(request, "user") and request.user.is_authenticated ): hour_ago = datetime.datetime.utcnow() - datetime.timedelta(minutes=60) - ip = request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META['REMOTE_ADDR'] + ip = request.META.get("HTTP_X_FORWARDED_FOR", None) or request.META["REMOTE_ADDR"] if request.user.profile.last_seen_on < hour_ago: logging.user( request, "~FG~BBRepeat visitor: ~SB%s (%s)" % (request.user.profile.last_seen_on, ip) @@ -50,11 +50,11 @@ def process_response(self, request, response): def __call__(self, request): response = None - if hasattr(self, 'process_request'): + if hasattr(self, "process_request"): response = self.process_request(request) if not response: response = self.get_response(request) - if hasattr(self, 'process_response'): + if hasattr(self, "process_response"): response = self.process_response(request, response) return response @@ -65,31 +65,31 @@ def __init__(self, get_response=None): self.get_response = get_response def process_request(self, request): - setattr(request, 'activated_segments', []) + setattr(request, "activated_segments", []) if ( - # request.path.startswith('/reader/feed') or - request.path.startswith('/reader/feed/') + # request.path.startswith('/reader/feed') or + request.path.startswith("/reader/feed/") ) and random.random() < 0.05: - request.activated_segments.append('db_profiler') + request.activated_segments.append("db_profiler") connection.use_debug_cursor = True - setattr(settings, 'ORIGINAL_DEBUG', settings.DEBUG) + setattr(settings, "ORIGINAL_DEBUG", settings.DEBUG) settings.DEBUG = True def process_celery(self): - setattr(self, 'activated_segments', []) + setattr(self, "activated_segments", []) if random.random() < 0.01 or settings.DEBUG_QUERIES: - self.activated_segments.append('db_profiler') + self.activated_segments.append("db_profiler") connection.use_debug_cursor = True - setattr(settings, 'ORIGINAL_DEBUG', settings.DEBUG) + setattr(settings, "ORIGINAL_DEBUG", settings.DEBUG) settings.DEBUG = True return self def process_exception(self, request, exception): - if hasattr(request, 'sql_times_elapsed'): + if hasattr(request, "sql_times_elapsed"): self._save_times(request.sql_times_elapsed) def process_response(self, request, response): - if hasattr(request, 'sql_times_elapsed'): + if hasattr(request, "sql_times_elapsed"): # middleware = SQLLogToConsoleMiddleware() # middleware.process_celery(self) # logging.debug(" ---> ~FGProfiling~FB app: %s" % request.sql_times_elapsed) @@ -99,16 +99,16 @@ def process_response(self, request, response): def process_celery_finished(self): middleware = SQLLogToConsoleMiddleware() middleware.process_celery(self) - if hasattr(self, 'sql_times_elapsed'): + if hasattr(self, "sql_times_elapsed"): logging.debug(" ---> ~FGProfiling~FB task: %s" % self.sql_times_elapsed) - 
self._save_times(self.sql_times_elapsed, 'task_') + self._save_times(self.sql_times_elapsed, "task_") def process_request_finished(self): middleware = SQLLogToConsoleMiddleware() middleware.process_celery(self) - if hasattr(self, 'sql_times_elapsed'): + if hasattr(self, "sql_times_elapsed"): logging.debug(" ---> ~FGProfiling~FB app: %s" % self.sql_times_elapsed) - self._save_times(self.sql_times_elapsed, 'app_') + self._save_times(self.sql_times_elapsed, "app_") def _save_times(self, db_times, prefix=""): if not db_times: @@ -118,7 +118,7 @@ def _save_times(self, db_times, prefix=""): pipe = r.pipeline() minute = round_time(round_to=60) for db, duration in list(db_times.items()): - key = "DB:%s%s:%s" % (prefix, db, minute.strftime('%s')) + key = "DB:%s%s:%s" % (prefix, db, minute.strftime("%s")) pipe.incr("%s:c" % key) pipe.expireat("%s:c" % key, (minute + datetime.timedelta(days=2)).strftime("%s")) if duration: @@ -128,11 +128,11 @@ def _save_times(self, db_times, prefix=""): def __call__(self, request): response = None - if hasattr(self, 'process_request'): + if hasattr(self, "process_request"): response = self.process_request(request) if not response: response = self.get_response(request) - if hasattr(self, 'process_response'): + if hasattr(self, "process_response"): response = self.process_response(request, response) return response @@ -144,7 +144,7 @@ def __init__(self, get_response=None): def activated(self, request): return settings.DEBUG_QUERIES or ( - hasattr(request, 'activated_segments') and 'db_profiler' in request.activated_segments + hasattr(request, "activated_segments") and "db_profiler" in request.activated_segments ) def process_response(self, request, response): @@ -152,38 +152,39 @@ def process_response(self, request, response): return response if connection.queries: queries = connection.queries - if getattr(connection, 'queriesx', False): + if getattr(connection, "queriesx", False): queries.extend(connection.queriesx) connection.queriesx = [] - time_elapsed = sum([float(q['time']) for q in connection.queries]) + time_elapsed = sum([float(q["time"]) for q in connection.queries]) for query in queries: - sql_time = float(query['time']) - query['color'] = '~FC' if sql_time < 0.015 else '~FK~SB' if sql_time < 0.05 else '~FR~SB' - if query.get('mongo'): - query['sql'] = "~FM%s %s: %s" % (query['mongo']['op'], query['mongo']['collection'], query['mongo']['query']) - elif query.get('redis_user'): - query['sql'] = "~FC%s" % (query['redis_user']['query']) - elif query.get('redis_story'): - query['sql'] = "~FC%s" % (query['redis_story']['query']) - elif query.get('redis_session'): - query['sql'] = "~FC%s" % (query['redis_session']['query']) - elif query.get('redis_pubsub'): - query['sql'] = "~FC%s" % (query['redis_pubsub']['query']) - elif query.get('db_redis'): - query['sql'] = "~FC%s" % (query['db_redis']['query']) - elif 'sql' not in query: + sql_time = float(query["time"]) + query["color"] = "~FC" if sql_time < 0.015 else "~FK~SB" if sql_time < 0.05 else "~FR~SB" + if query.get("mongo"): + query["sql"] = "~FM%s %s: %s" % ( + query["mongo"]["op"], + query["mongo"]["collection"], + query["mongo"]["query"], + ) + elif query.get("redis_user"): + query["sql"] = "~FC%s" % (query["redis_user"]["query"]) + elif query.get("redis_story"): + query["sql"] = "~FC%s" % (query["redis_story"]["query"]) + elif query.get("redis_session"): + query["sql"] = "~FC%s" % (query["redis_session"]["query"]) + elif query.get("redis_pubsub"): + query["sql"] = "~FC%s" % (query["redis_pubsub"]["query"]) + 
elif query.get("db_redis"): + query["sql"] = "~FC%s" % (query["db_redis"]["query"]) + elif "sql" not in query: logging.debug(" ***> Query log missing: %s" % query) else: - query['sql'] = re.sub(r'SELECT (.*?) FROM', 'SELECT * FROM', query['sql']) - query['sql'] = re.sub(r'SELECT', '~FYSELECT', query['sql']) - query['sql'] = re.sub(r'INSERT', '~FGINSERT', query['sql']) - query['sql'] = re.sub(r'UPDATE', '~FY~SBUPDATE', query['sql']) - query['sql'] = re.sub(r'DELETE', '~FR~SBDELETE', query['sql']) - - if ( - settings.DEBUG_QUERIES - and not getattr(settings, 'DEBUG_QUERIES_SUMMARY_ONLY', False) - ): + query["sql"] = re.sub(r"SELECT (.*?) FROM", "SELECT * FROM", query["sql"]) + query["sql"] = re.sub(r"SELECT", "~FYSELECT", query["sql"]) + query["sql"] = re.sub(r"INSERT", "~FGINSERT", query["sql"]) + query["sql"] = re.sub(r"UPDATE", "~FY~SBUPDATE", query["sql"]) + query["sql"] = re.sub(r"DELETE", "~FR~SBDELETE", query["sql"]) + + if settings.DEBUG_QUERIES and not getattr(settings, "DEBUG_QUERIES_SUMMARY_ONLY", False): t = Template( "{% for sql in sqllog %}{% if not forloop.first %} {% endif %}[{{forloop.counter}}] {{sql.color}}{{sql.time}}~SN~FW: {{sql.sql|safe}}{% if not forloop.last %}\n{% endif %}{% endfor %}" ) @@ -191,51 +192,51 @@ def process_response(self, request, response): t.render( Context( { - 'sqllog': queries, - 'count': len(queries), - 'time': time_elapsed, + "sqllog": queries, + "count": len(queries), + "time": time_elapsed, } ) ) ) times_elapsed = { - 'sql': sum( + "sql": sum( [ - float(q['time']) + float(q["time"]) for q in queries - if not q.get('mongo') - and not q.get('redis_user') - and not q.get('redis_story') - and not q.get('redis_session') - and not q.get('redis_pubsub') + if not q.get("mongo") + and not q.get("redis_user") + and not q.get("redis_story") + and not q.get("redis_session") + and not q.get("redis_pubsub") ] ), - 'mongo': sum([float(q['time']) for q in queries if q.get('mongo')]), - 'redis_user': sum([float(q['time']) for q in queries if q.get('redis_user')]), - 'redis_story': sum([float(q['time']) for q in queries if q.get('redis_story')]), - 'redis_session': sum([float(q['time']) for q in queries if q.get('redis_session')]), - 'redis_pubsub': sum([float(q['time']) for q in queries if q.get('redis_pubsub')]), + "mongo": sum([float(q["time"]) for q in queries if q.get("mongo")]), + "redis_user": sum([float(q["time"]) for q in queries if q.get("redis_user")]), + "redis_story": sum([float(q["time"]) for q in queries if q.get("redis_story")]), + "redis_session": sum([float(q["time"]) for q in queries if q.get("redis_session")]), + "redis_pubsub": sum([float(q["time"]) for q in queries if q.get("redis_pubsub")]), } - setattr(request, 'sql_times_elapsed', times_elapsed) + setattr(request, "sql_times_elapsed", times_elapsed) else: print(" ***> No queries") - if not getattr(settings, 'ORIGINAL_DEBUG', settings.DEBUG): + if not getattr(settings, "ORIGINAL_DEBUG", settings.DEBUG): settings.DEBUG = False return response def process_celery(self, profiler): self.process_response(profiler, None) - if not getattr(settings, 'ORIGINAL_DEBUG', settings.DEBUG): + if not getattr(settings, "ORIGINAL_DEBUG", settings.DEBUG): settings.DEBUG = False def __call__(self, request): response = None - if hasattr(self, 'process_request'): + if hasattr(self, "process_request"): response = self.process_request(request) if not response: response = self.get_response(request) - if hasattr(self, 'process_response'): + if hasattr(self, "process_response"): response = 
self.process_response(request, response) return response @@ -246,7 +247,7 @@ def __call__(self, request): ("Ralph", "Me fail English? That's unpossible."), ( "Lionel Hutz", - "This is the greatest case of false advertising I've seen since I sued the movie \"The Never Ending Story.\"", + 'This is the greatest case of false advertising I\'ve seen since I sued the movie "The Never Ending Story."', ), ("Sideshow Bob", "No children have ever meddled with the Republican Party and lived to tell about it."), ( @@ -261,7 +262,7 @@ def __call__(self, request): ), ( "Comic Book Guy", - "Your questions have become more redundant and annoying than the last three \"Highlander\" movies.", + 'Your questions have become more redundant and annoying than the last three "Highlander" movies.', ), ("Chief Wiggum", "Uh, no, you got the wrong number. This is 9-1...2."), ( @@ -282,11 +283,11 @@ def __call__(self, request): ), ( "Lionel Hutz", - "Well, he's kind of had it in for me ever since I accidentally ran over his dog. Actually, replace \"accidentally\" with \"repeatedly\" and replace \"dog\" with \"son.\"", + 'Well, he\'s kind of had it in for me ever since I accidentally ran over his dog. Actually, replace "accidentally" with "repeatedly" and replace "dog" with "son."', ), ( "Comic Book Guy", - "Last night's \"Itchy and Scratchy Show\" was, without a doubt, the worst episode *ever.* Rest assured, I was on the Internet within minutes, registering my disgust throughout the world.", + 'Last night\'s "Itchy and Scratchy Show" was, without a doubt, the worst episode *ever.* Rest assured, I was on the Internet within minutes, registering my disgust throughout the world.', ), ("Homer", "I'm normally not a praying man, but if you're up there, please save me, Superman."), ("Homer", "Save me, Jeebus."), @@ -307,7 +308,7 @@ def __call__(self, request): ("Homer", "Fame was like a drug. But what was even more like a drug were the drugs."), ( "Homer", - "Books are useless! I only ever read one book, \"To Kill A Mockingbird,\" and it gave me absolutely no insight on how to kill mockingbirds! Sure it taught me not to judge a man by the color of his skin...but what good does *that* do me?", + 'Books are useless! I only ever read one book, "To Kill A Mockingbird," and it gave me absolutely no insight on how to kill mockingbirds! Sure it taught me not to judge a man by the color of his skin...but what good does *that* do me?', ), ( "Chief Wiggum", @@ -325,8 +326,8 @@ def __call__(self, request): "Homer", "You know, the one with all the well meaning rules that don't work out in real life, uh, Christianity.", ), - ("Smithers", "Uh, no, they're saying \"Boo-urns, Boo-urns.\""), - ("Hans Moleman", "I was saying \"Boo-urns.\""), + ("Smithers", 'Uh, no, they\'re saying "Boo-urns, Boo-urns."'), + ("Hans Moleman", 'I was saying "Boo-urns."'), ("Homer", "Kids, you tried your best and you failed miserably. The lesson is, never try."), ("Homer", "Here's to alcohol, the cause of - and solution to - all life's problems."), ( @@ -350,7 +351,7 @@ def __call__(self, request): ), ( "Troy McClure", - "Hi. I'm Troy McClure. You may remember me from such self-help tapes as \"Smoke Yourself Thin\" and \"Get Some Confidence, Stupid!\"", + 'Hi. I\'m Troy McClure. You may remember me from such self-help tapes as "Smoke Yourself Thin" and "Get Some Confidence, Stupid!"', ), ("Homer", "A woman is a lot like a refrigerator. 
Six feet tall, 300 pounds...it makes ice."), ( @@ -425,7 +426,7 @@ def __call__(self, request): ("Barney", "Jesus must be spinning in his grave!"), ( "Superintendent Chalmers", - "\"Thank the Lord\"? That sounded like a prayer. A prayer in a public school. God has no place within these walls, just like facts don't have a place within an organized religion.", + '"Thank the Lord"? That sounded like a prayer. A prayer in a public school. God has no place within these walls, just like facts don\'t have a place within an organized religion.', ), ("Mr Burns", "[answering the phone] Ahoy hoy?"), ("Comic Book Guy", "Oh, a *sarcasm* detector. Oh, that's a *really* useful invention!"), @@ -487,18 +488,18 @@ def __init__(self, get_response=None): def process_response(self, request, response): quote = random.choice(SIMPSONS_QUOTES) - source = quote[0].replace(' ', '-') + source = quote[0].replace(" ", "-") response["X-%s" % source] = quote[1] return response def __call__(self, request): response = None - if hasattr(self, 'process_request'): + if hasattr(self, "process_request"): response = self.process_request(request) if not response: response = self.get_response(request) - if hasattr(self, 'process_response'): + if hasattr(self, "process_response"): response = self.process_response(request, response) return response @@ -515,11 +516,11 @@ def process_response(self, request, response): def __call__(self, request): response = None - if hasattr(self, 'process_request'): + if hasattr(self, "process_request"): response = self.process_request(request) if not response: response = self.get_response(request) - if hasattr(self, 'process_response'): + if hasattr(self, "process_response"): response = self.process_response(request, response) return response @@ -530,7 +531,7 @@ def __init__(self, get_response=None): self.get_response = get_response def process_request(self, request): - setattr(request, 'start_time', time.time()) + setattr(request, "start_time", time.time()) def __call__(self, request): response = self.process_request(request) @@ -541,8 +542,8 @@ def __call__(self, request): BANNED_USER_AGENTS = ( - 'feed reader-background', - 'missing', + "feed reader-background", + "missing", ) BANNED_USERNAMES = () @@ -553,46 +554,46 @@ def __init__(self, get_response=None): self.get_response = get_response def process_request(self, request): - user_agent = request.environ.get('HTTP_USER_AGENT', 'missing').lower() + user_agent = request.environ.get("HTTP_USER_AGENT", "missing").lower() - if 'profile' in request.path: + if "profile" in request.path: return - if 'haproxy' in request.path: + if "haproxy" in request.path: return - if 'dbcheck' in request.path: + if "dbcheck" in request.path: return - if 'account' in request.path: + if "account" in request.path: return - if 'push' in request.path: + if "push" in request.path: return - if getattr(settings, 'TEST_DEBUG'): + if getattr(settings, "TEST_DEBUG"): return if any(ua in user_agent for ua in BANNED_USER_AGENTS): - data = {'error': 'User agent banned: %s' % user_agent, 'code': -1} + data = {"error": "User agent banned: %s" % user_agent, "code": -1} logging.user( request, "~FB~SN~BBBanned UA: ~SB%s / %s (%s)" % (user_agent, request.path, request.META) ) - return HttpResponse(json.encode(data), status=403, content_type='text/json') + return HttpResponse(json.encode(data), status=403, content_type="text/json") if request.user.is_authenticated and any( username == request.user.username for username in BANNED_USERNAMES ): - data = {'error': 'User banned: %s' % 
request.user.username, 'code': -1} + data = {"error": "User banned: %s" % request.user.username, "code": -1} logging.user( request, "~FB~SN~BBBanned Username: ~SB%s / %s (%s)" % (request.user, request.path, request.META), ) - return HttpResponse(json.encode(data), status=403, content_type='text/json') + return HttpResponse(json.encode(data), status=403, content_type="text/json") def __call__(self, request): response = None - if hasattr(self, 'process_request'): + if hasattr(self, "process_request"): response = self.process_request(request) if not response: response = self.get_response(request) - if hasattr(self, 'process_response'): + if hasattr(self, "process_response"): response = self.process_response(request, response) return response diff --git a/apps/profile/migrations/0001_initial.py b/apps/profile/migrations/0001_initial.py index 82d4a4fe7b..ac7841d508 100644 --- a/apps/profile/migrations/0001_initial.py +++ b/apps/profile/migrations/0001_initial.py @@ -8,7 +8,6 @@ class Migration(migrations.Migration): - initial = True dependencies = [ @@ -17,51 +16,528 @@ class Migration(migrations.Migration): operations = [ migrations.CreateModel( - name='PaymentHistory', + name="PaymentHistory", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('payment_date', models.DateTimeField()), - ('payment_amount', models.IntegerField()), - ('payment_provider', models.CharField(max_length=20)), - ('payment_identifier', models.CharField(max_length=100, null=True)), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='payments', to=settings.AUTH_USER_MODEL)), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("payment_date", models.DateTimeField()), + ("payment_amount", models.IntegerField()), + ("payment_provider", models.CharField(max_length=20)), + ("payment_identifier", models.CharField(max_length=100, null=True)), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="payments", + to=settings.AUTH_USER_MODEL, + ), + ), ], options={ - 'ordering': ['-payment_date'], + "ordering": ["-payment_date"], }, ), migrations.CreateModel( - name='Profile', + name="Profile", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('is_premium', models.BooleanField(default=False)), - ('premium_expire', models.DateTimeField(blank=True, null=True)), - ('send_emails', models.BooleanField(default=True)), - ('preferences', models.TextField(default='{}')), - ('view_settings', models.TextField(default='{}')), - ('collapsed_folders', models.TextField(default='[]')), - ('feed_pane_size', models.IntegerField(default=242)), - ('tutorial_finished', models.BooleanField(default=False)), - ('hide_getting_started', models.NullBooleanField(default=False)), - ('has_setup_feeds', models.NullBooleanField(default=False)), - ('has_found_friends', models.NullBooleanField(default=False)), - ('has_trained_intelligence', models.NullBooleanField(default=False)), - ('last_seen_on', models.DateTimeField(default=datetime.datetime.now)), - ('last_seen_ip', models.CharField(blank=True, max_length=50, null=True)), - ('dashboard_date', models.DateTimeField(default=datetime.datetime.now)), - ('timezone', vendor.timezones.fields.TimeZoneField(choices=[('Africa/Abidjan', '(GMT+0000) Africa/Abidjan'), ('Africa/Accra', '(GMT+0000) Africa/Accra'), ('Africa/Addis_Ababa', '(GMT+0300) 
Africa/Addis_Ababa'), ('Africa/Algiers', '(GMT+0100) Africa/Algiers'), ('Africa/Asmara', '(GMT+0300) Africa/Asmara'), ('Africa/Bamako', '(GMT+0000) Africa/Bamako'), ('Africa/Bangui', '(GMT+0100) Africa/Bangui'), ('Africa/Banjul', '(GMT+0000) Africa/Banjul'), ('Africa/Bissau', '(GMT+0000) Africa/Bissau'), ('Africa/Blantyre', '(GMT+0200) Africa/Blantyre'), ('Africa/Brazzaville', '(GMT+0100) Africa/Brazzaville'), ('Africa/Bujumbura', '(GMT+0200) Africa/Bujumbura'), ('Africa/Cairo', '(GMT+0200) Africa/Cairo'), ('Africa/Casablanca', '(GMT+0100) Africa/Casablanca'), ('Africa/Ceuta', '(GMT+0200) Africa/Ceuta'), ('Africa/Conakry', '(GMT+0000) Africa/Conakry'), ('Africa/Dakar', '(GMT+0000) Africa/Dakar'), ('Africa/Dar_es_Salaam', '(GMT+0300) Africa/Dar_es_Salaam'), ('Africa/Djibouti', '(GMT+0300) Africa/Djibouti'), ('Africa/Douala', '(GMT+0100) Africa/Douala'), ('Africa/El_Aaiun', '(GMT+0100) Africa/El_Aaiun'), ('Africa/Freetown', '(GMT+0000) Africa/Freetown'), ('Africa/Gaborone', '(GMT+0200) Africa/Gaborone'), ('Africa/Harare', '(GMT+0200) Africa/Harare'), ('Africa/Johannesburg', '(GMT+0200) Africa/Johannesburg'), ('Africa/Juba', '(GMT+0300) Africa/Juba'), ('Africa/Kampala', '(GMT+0300) Africa/Kampala'), ('Africa/Khartoum', '(GMT+0200) Africa/Khartoum'), ('Africa/Kigali', '(GMT+0200) Africa/Kigali'), ('Africa/Kinshasa', '(GMT+0100) Africa/Kinshasa'), ('Africa/Lagos', '(GMT+0100) Africa/Lagos'), ('Africa/Libreville', '(GMT+0100) Africa/Libreville'), ('Africa/Lome', '(GMT+0000) Africa/Lome'), ('Africa/Luanda', '(GMT+0100) Africa/Luanda'), ('Africa/Lubumbashi', '(GMT+0200) Africa/Lubumbashi'), ('Africa/Lusaka', '(GMT+0200) Africa/Lusaka'), ('Africa/Malabo', '(GMT+0100) Africa/Malabo'), ('Africa/Maputo', '(GMT+0200) Africa/Maputo'), ('Africa/Maseru', '(GMT+0200) Africa/Maseru'), ('Africa/Mbabane', '(GMT+0200) Africa/Mbabane'), ('Africa/Mogadishu', '(GMT+0300) Africa/Mogadishu'), ('Africa/Monrovia', '(GMT+0000) Africa/Monrovia'), ('Africa/Nairobi', '(GMT+0300) Africa/Nairobi'), ('Africa/Ndjamena', '(GMT+0100) Africa/Ndjamena'), ('Africa/Niamey', '(GMT+0100) Africa/Niamey'), ('Africa/Nouakchott', '(GMT+0000) Africa/Nouakchott'), ('Africa/Ouagadougou', '(GMT+0000) Africa/Ouagadougou'), ('Africa/Porto-Novo', '(GMT+0100) Africa/Porto-Novo'), ('Africa/Sao_Tome', '(GMT+0100) Africa/Sao_Tome'), ('Africa/Tripoli', '(GMT+0200) Africa/Tripoli'), ('Africa/Tunis', '(GMT+0100) Africa/Tunis'), ('Africa/Windhoek', '(GMT+0200) Africa/Windhoek'), ('America/Adak', '(GMT-0900) America/Adak'), ('America/Anchorage', '(GMT-0800) America/Anchorage'), ('America/Anguilla', '(GMT-0400) America/Anguilla'), ('America/Antigua', '(GMT-0400) America/Antigua'), ('America/Araguaina', '(GMT-0300) America/Araguaina'), ('America/Argentina/Buenos_Aires', '(GMT-0300) America/Argentina/Buenos_Aires'), ('America/Argentina/Catamarca', '(GMT-0300) America/Argentina/Catamarca'), ('America/Argentina/Cordoba', '(GMT-0300) America/Argentina/Cordoba'), ('America/Argentina/Jujuy', '(GMT-0300) America/Argentina/Jujuy'), ('America/Argentina/La_Rioja', '(GMT-0300) America/Argentina/La_Rioja'), ('America/Argentina/Mendoza', '(GMT-0300) America/Argentina/Mendoza'), ('America/Argentina/Rio_Gallegos', '(GMT-0300) America/Argentina/Rio_Gallegos'), ('America/Argentina/Salta', '(GMT-0300) America/Argentina/Salta'), ('America/Argentina/San_Juan', '(GMT-0300) America/Argentina/San_Juan'), ('America/Argentina/San_Luis', '(GMT-0300) America/Argentina/San_Luis'), ('America/Argentina/Tucuman', '(GMT-0300) America/Argentina/Tucuman'), ('America/Argentina/Ushuaia', 
'(GMT-0300) America/Argentina/Ushuaia'), ('America/Aruba', '(GMT-0400) America/Aruba'), ('America/Asuncion', '(GMT-0400) America/Asuncion'), ('America/Atikokan', '(GMT-0500) America/Atikokan'), ('America/Bahia', '(GMT-0300) America/Bahia'), ('America/Bahia_Banderas', '(GMT-0500) America/Bahia_Banderas'), ('America/Barbados', '(GMT-0400) America/Barbados'), ('America/Belem', '(GMT-0300) America/Belem'), ('America/Belize', '(GMT-0600) America/Belize'), ('America/Blanc-Sablon', '(GMT-0400) America/Blanc-Sablon'), ('America/Boa_Vista', '(GMT-0400) America/Boa_Vista'), ('America/Bogota', '(GMT-0500) America/Bogota'), ('America/Boise', '(GMT-0600) America/Boise'), ('America/Cambridge_Bay', '(GMT-0600) America/Cambridge_Bay'), ('America/Campo_Grande', '(GMT-0400) America/Campo_Grande'), ('America/Cancun', '(GMT-0500) America/Cancun'), ('America/Caracas', '(GMT-0400) America/Caracas'), ('America/Cayenne', '(GMT-0300) America/Cayenne'), ('America/Cayman', '(GMT-0500) America/Cayman'), ('America/Chicago', '(GMT-0500) America/Chicago'), ('America/Chihuahua', '(GMT-0600) America/Chihuahua'), ('America/Costa_Rica', '(GMT-0600) America/Costa_Rica'), ('America/Creston', '(GMT-0700) America/Creston'), ('America/Cuiaba', '(GMT-0400) America/Cuiaba'), ('America/Curacao', '(GMT-0400) America/Curacao'), ('America/Danmarkshavn', '(GMT+0000) America/Danmarkshavn'), ('America/Dawson', '(GMT-0700) America/Dawson'), ('America/Dawson_Creek', '(GMT-0700) America/Dawson_Creek'), ('America/Denver', '(GMT-0600) America/Denver'), ('America/Detroit', '(GMT-0400) America/Detroit'), ('America/Dominica', '(GMT-0400) America/Dominica'), ('America/Edmonton', '(GMT-0600) America/Edmonton'), ('America/Eirunepe', '(GMT-0500) America/Eirunepe'), ('America/El_Salvador', '(GMT-0600) America/El_Salvador'), ('America/Fort_Nelson', '(GMT-0700) America/Fort_Nelson'), ('America/Fortaleza', '(GMT-0300) America/Fortaleza'), ('America/Glace_Bay', '(GMT-0300) America/Glace_Bay'), ('America/Godthab', '(GMT-0200) America/Godthab'), ('America/Goose_Bay', '(GMT-0300) America/Goose_Bay'), ('America/Grand_Turk', '(GMT-0400) America/Grand_Turk'), ('America/Grenada', '(GMT-0400) America/Grenada'), ('America/Guadeloupe', '(GMT-0400) America/Guadeloupe'), ('America/Guatemala', '(GMT-0600) America/Guatemala'), ('America/Guayaquil', '(GMT-0500) America/Guayaquil'), ('America/Guyana', '(GMT-0400) America/Guyana'), ('America/Halifax', '(GMT-0300) America/Halifax'), ('America/Havana', '(GMT-0400) America/Havana'), ('America/Hermosillo', '(GMT-0700) America/Hermosillo'), ('America/Indiana/Indianapolis', '(GMT-0400) America/Indiana/Indianapolis'), ('America/Indiana/Knox', '(GMT-0500) America/Indiana/Knox'), ('America/Indiana/Marengo', '(GMT-0400) America/Indiana/Marengo'), ('America/Indiana/Petersburg', '(GMT-0400) America/Indiana/Petersburg'), ('America/Indiana/Tell_City', '(GMT-0500) America/Indiana/Tell_City'), ('America/Indiana/Vevay', '(GMT-0400) America/Indiana/Vevay'), ('America/Indiana/Vincennes', '(GMT-0400) America/Indiana/Vincennes'), ('America/Indiana/Winamac', '(GMT-0400) America/Indiana/Winamac'), ('America/Inuvik', '(GMT-0600) America/Inuvik'), ('America/Iqaluit', '(GMT-0400) America/Iqaluit'), ('America/Jamaica', '(GMT-0500) America/Jamaica'), ('America/Juneau', '(GMT-0800) America/Juneau'), ('America/Kentucky/Louisville', '(GMT-0400) America/Kentucky/Louisville'), ('America/Kentucky/Monticello', '(GMT-0400) America/Kentucky/Monticello'), ('America/Kralendijk', '(GMT-0400) America/Kralendijk'), ('America/La_Paz', '(GMT-0400) 
America/La_Paz'), ('America/Lima', '(GMT-0500) America/Lima'), ('America/Los_Angeles', '(GMT-0700) America/Los_Angeles'), ('America/Lower_Princes', '(GMT-0400) America/Lower_Princes'), ('America/Maceio', '(GMT-0300) America/Maceio'), ('America/Managua', '(GMT-0600) America/Managua'), ('America/Manaus', '(GMT-0400) America/Manaus'), ('America/Marigot', '(GMT-0400) America/Marigot'), ('America/Martinique', '(GMT-0400) America/Martinique'), ('America/Matamoros', '(GMT-0500) America/Matamoros'), ('America/Mazatlan', '(GMT-0600) America/Mazatlan'), ('America/Menominee', '(GMT-0500) America/Menominee'), ('America/Merida', '(GMT-0500) America/Merida'), ('America/Metlakatla', '(GMT-0800) America/Metlakatla'), ('America/Mexico_City', '(GMT-0500) America/Mexico_City'), ('America/Miquelon', '(GMT-0200) America/Miquelon'), ('America/Moncton', '(GMT-0300) America/Moncton'), ('America/Monterrey', '(GMT-0500) America/Monterrey'), ('America/Montevideo', '(GMT-0300) America/Montevideo'), ('America/Montserrat', '(GMT-0400) America/Montserrat'), ('America/Nassau', '(GMT-0400) America/Nassau'), ('America/New_York', '(GMT-0400) America/New_York'), ('America/Nipigon', '(GMT-0400) America/Nipigon'), ('America/Nome', '(GMT-0800) America/Nome'), ('America/Noronha', '(GMT-0200) America/Noronha'), ('America/North_Dakota/Beulah', '(GMT-0500) America/North_Dakota/Beulah'), ('America/North_Dakota/Center', '(GMT-0500) America/North_Dakota/Center'), ('America/North_Dakota/New_Salem', '(GMT-0500) America/North_Dakota/New_Salem'), ('America/Ojinaga', '(GMT-0600) America/Ojinaga'), ('America/Panama', '(GMT-0500) America/Panama'), ('America/Pangnirtung', '(GMT-0400) America/Pangnirtung'), ('America/Paramaribo', '(GMT-0300) America/Paramaribo'), ('America/Phoenix', '(GMT-0700) America/Phoenix'), ('America/Port-au-Prince', '(GMT-0400) America/Port-au-Prince'), ('America/Port_of_Spain', '(GMT-0400) America/Port_of_Spain'), ('America/Porto_Velho', '(GMT-0400) America/Porto_Velho'), ('America/Puerto_Rico', '(GMT-0400) America/Puerto_Rico'), ('America/Punta_Arenas', '(GMT-0300) America/Punta_Arenas'), ('America/Rainy_River', '(GMT-0500) America/Rainy_River'), ('America/Rankin_Inlet', '(GMT-0500) America/Rankin_Inlet'), ('America/Recife', '(GMT-0300) America/Recife'), ('America/Regina', '(GMT-0600) America/Regina'), ('America/Resolute', '(GMT-0500) America/Resolute'), ('America/Rio_Branco', '(GMT-0500) America/Rio_Branco'), ('America/Santarem', '(GMT-0300) America/Santarem'), ('America/Santiago', '(GMT-0400) America/Santiago'), ('America/Santo_Domingo', '(GMT-0400) America/Santo_Domingo'), ('America/Sao_Paulo', '(GMT-0300) America/Sao_Paulo'), ('America/Scoresbysund', '(GMT+0000) America/Scoresbysund'), ('America/Sitka', '(GMT-0800) America/Sitka'), ('America/St_Barthelemy', '(GMT-0400) America/St_Barthelemy'), ('America/St_Johns', '(GMT-0230) America/St_Johns'), ('America/St_Kitts', '(GMT-0400) America/St_Kitts'), ('America/St_Lucia', '(GMT-0400) America/St_Lucia'), ('America/St_Thomas', '(GMT-0400) America/St_Thomas'), ('America/St_Vincent', '(GMT-0400) America/St_Vincent'), ('America/Swift_Current', '(GMT-0600) America/Swift_Current'), ('America/Tegucigalpa', '(GMT-0600) America/Tegucigalpa'), ('America/Thule', '(GMT-0300) America/Thule'), ('America/Thunder_Bay', '(GMT-0400) America/Thunder_Bay'), ('America/Tijuana', '(GMT-0700) America/Tijuana'), ('America/Toronto', '(GMT-0400) America/Toronto'), ('America/Tortola', '(GMT-0400) America/Tortola'), ('America/Vancouver', '(GMT-0700) America/Vancouver'), ('America/Whitehorse', 
'(GMT-0700) America/Whitehorse'), ('America/Winnipeg', '(GMT-0500) America/Winnipeg'), ('America/Yakutat', '(GMT-0800) America/Yakutat'), ('America/Yellowknife', '(GMT-0600) America/Yellowknife'), ('Antarctica/Casey', '(GMT+1100) Antarctica/Casey'), ('Antarctica/Davis', '(GMT+0700) Antarctica/Davis'), ('Antarctica/DumontDUrville', '(GMT+1000) Antarctica/DumontDUrville'), ('Antarctica/Macquarie', '(GMT+1100) Antarctica/Macquarie'), ('Antarctica/Mawson', '(GMT+0500) Antarctica/Mawson'), ('Antarctica/McMurdo', '(GMT+1200) Antarctica/McMurdo'), ('Antarctica/Palmer', '(GMT-0300) Antarctica/Palmer'), ('Antarctica/Rothera', '(GMT-0300) Antarctica/Rothera'), ('Antarctica/Syowa', '(GMT+0300) Antarctica/Syowa'), ('Antarctica/Troll', '(GMT+0200) Antarctica/Troll'), ('Antarctica/Vostok', '(GMT+0600) Antarctica/Vostok'), ('Arctic/Longyearbyen', '(GMT+0200) Arctic/Longyearbyen'), ('Asia/Aden', '(GMT+0300) Asia/Aden'), ('Asia/Almaty', '(GMT+0600) Asia/Almaty'), ('Asia/Amman', '(GMT+0300) Asia/Amman'), ('Asia/Anadyr', '(GMT+1200) Asia/Anadyr'), ('Asia/Aqtau', '(GMT+0500) Asia/Aqtau'), ('Asia/Aqtobe', '(GMT+0500) Asia/Aqtobe'), ('Asia/Ashgabat', '(GMT+0500) Asia/Ashgabat'), ('Asia/Atyrau', '(GMT+0500) Asia/Atyrau'), ('Asia/Baghdad', '(GMT+0300) Asia/Baghdad'), ('Asia/Bahrain', '(GMT+0300) Asia/Bahrain'), ('Asia/Baku', '(GMT+0400) Asia/Baku'), ('Asia/Bangkok', '(GMT+0700) Asia/Bangkok'), ('Asia/Barnaul', '(GMT+0700) Asia/Barnaul'), ('Asia/Beirut', '(GMT+0300) Asia/Beirut'), ('Asia/Bishkek', '(GMT+0600) Asia/Bishkek'), ('Asia/Brunei', '(GMT+0800) Asia/Brunei'), ('Asia/Chita', '(GMT+0900) Asia/Chita'), ('Asia/Choibalsan', '(GMT+0800) Asia/Choibalsan'), ('Asia/Colombo', '(GMT+0530) Asia/Colombo'), ('Asia/Damascus', '(GMT+0300) Asia/Damascus'), ('Asia/Dhaka', '(GMT+0600) Asia/Dhaka'), ('Asia/Dili', '(GMT+0900) Asia/Dili'), ('Asia/Dubai', '(GMT+0400) Asia/Dubai'), ('Asia/Dushanbe', '(GMT+0500) Asia/Dushanbe'), ('Asia/Famagusta', '(GMT+0300) Asia/Famagusta'), ('Asia/Gaza', '(GMT+0300) Asia/Gaza'), ('Asia/Hebron', '(GMT+0300) Asia/Hebron'), ('Asia/Ho_Chi_Minh', '(GMT+0700) Asia/Ho_Chi_Minh'), ('Asia/Hong_Kong', '(GMT+0800) Asia/Hong_Kong'), ('Asia/Hovd', '(GMT+0700) Asia/Hovd'), ('Asia/Irkutsk', '(GMT+0800) Asia/Irkutsk'), ('Asia/Jakarta', '(GMT+0700) Asia/Jakarta'), ('Asia/Jayapura', '(GMT+0900) Asia/Jayapura'), ('Asia/Jerusalem', '(GMT+0300) Asia/Jerusalem'), ('Asia/Kabul', '(GMT+0430) Asia/Kabul'), ('Asia/Kamchatka', '(GMT+1200) Asia/Kamchatka'), ('Asia/Karachi', '(GMT+0500) Asia/Karachi'), ('Asia/Kathmandu', '(GMT+0545) Asia/Kathmandu'), ('Asia/Khandyga', '(GMT+0900) Asia/Khandyga'), ('Asia/Kolkata', '(GMT+0530) Asia/Kolkata'), ('Asia/Krasnoyarsk', '(GMT+0700) Asia/Krasnoyarsk'), ('Asia/Kuala_Lumpur', '(GMT+0800) Asia/Kuala_Lumpur'), ('Asia/Kuching', '(GMT+0800) Asia/Kuching'), ('Asia/Kuwait', '(GMT+0300) Asia/Kuwait'), ('Asia/Macau', '(GMT+0800) Asia/Macau'), ('Asia/Magadan', '(GMT+1100) Asia/Magadan'), ('Asia/Makassar', '(GMT+0800) Asia/Makassar'), ('Asia/Manila', '(GMT+0800) Asia/Manila'), ('Asia/Muscat', '(GMT+0400) Asia/Muscat'), ('Asia/Nicosia', '(GMT+0300) Asia/Nicosia'), ('Asia/Novokuznetsk', '(GMT+0700) Asia/Novokuznetsk'), ('Asia/Novosibirsk', '(GMT+0700) Asia/Novosibirsk'), ('Asia/Omsk', '(GMT+0600) Asia/Omsk'), ('Asia/Oral', '(GMT+0500) Asia/Oral'), ('Asia/Phnom_Penh', '(GMT+0700) Asia/Phnom_Penh'), ('Asia/Pontianak', '(GMT+0700) Asia/Pontianak'), ('Asia/Pyongyang', '(GMT+0830) Asia/Pyongyang'), ('Asia/Qatar', '(GMT+0300) Asia/Qatar'), ('Asia/Qyzylorda', '(GMT+0600) Asia/Qyzylorda'), 
('Asia/Riyadh', '(GMT+0300) Asia/Riyadh'), ('Asia/Sakhalin', '(GMT+1100) Asia/Sakhalin'), ('Asia/Samarkand', '(GMT+0500) Asia/Samarkand'), ('Asia/Seoul', '(GMT+0900) Asia/Seoul'), ('Asia/Shanghai', '(GMT+0800) Asia/Shanghai'), ('Asia/Singapore', '(GMT+0800) Asia/Singapore'), ('Asia/Srednekolymsk', '(GMT+1100) Asia/Srednekolymsk'), ('Asia/Taipei', '(GMT+0800) Asia/Taipei'), ('Asia/Tashkent', '(GMT+0500) Asia/Tashkent'), ('Asia/Tbilisi', '(GMT+0400) Asia/Tbilisi'), ('Asia/Tehran', '(GMT+0430) Asia/Tehran'), ('Asia/Thimphu', '(GMT+0600) Asia/Thimphu'), ('Asia/Tokyo', '(GMT+0900) Asia/Tokyo'), ('Asia/Tomsk', '(GMT+0700) Asia/Tomsk'), ('Asia/Ulaanbaatar', '(GMT+0800) Asia/Ulaanbaatar'), ('Asia/Urumqi', '(GMT+0600) Asia/Urumqi'), ('Asia/Ust-Nera', '(GMT+1000) Asia/Ust-Nera'), ('Asia/Vientiane', '(GMT+0700) Asia/Vientiane'), ('Asia/Vladivostok', '(GMT+1000) Asia/Vladivostok'), ('Asia/Yakutsk', '(GMT+0900) Asia/Yakutsk'), ('Asia/Yangon', '(GMT+0630) Asia/Yangon'), ('Asia/Yekaterinburg', '(GMT+0500) Asia/Yekaterinburg'), ('Asia/Yerevan', '(GMT+0400) Asia/Yerevan'), ('Atlantic/Azores', '(GMT+0000) Atlantic/Azores'), ('Atlantic/Bermuda', '(GMT-0300) Atlantic/Bermuda'), ('Atlantic/Canary', '(GMT+0100) Atlantic/Canary'), ('Atlantic/Cape_Verde', '(GMT-0100) Atlantic/Cape_Verde'), ('Atlantic/Faroe', '(GMT+0100) Atlantic/Faroe'), ('Atlantic/Madeira', '(GMT+0100) Atlantic/Madeira'), ('Atlantic/Reykjavik', '(GMT+0000) Atlantic/Reykjavik'), ('Atlantic/South_Georgia', '(GMT-0200) Atlantic/South_Georgia'), ('Atlantic/St_Helena', '(GMT+0000) Atlantic/St_Helena'), ('Atlantic/Stanley', '(GMT-0300) Atlantic/Stanley'), ('Australia/Adelaide', '(GMT+0930) Australia/Adelaide'), ('Australia/Brisbane', '(GMT+1000) Australia/Brisbane'), ('Australia/Broken_Hill', '(GMT+0930) Australia/Broken_Hill'), ('Australia/Currie', '(GMT+1000) Australia/Currie'), ('Australia/Darwin', '(GMT+0930) Australia/Darwin'), ('Australia/Eucla', '(GMT+0845) Australia/Eucla'), ('Australia/Hobart', '(GMT+1000) Australia/Hobart'), ('Australia/Lindeman', '(GMT+1000) Australia/Lindeman'), ('Australia/Lord_Howe', '(GMT+1030) Australia/Lord_Howe'), ('Australia/Melbourne', '(GMT+1000) Australia/Melbourne'), ('Australia/Perth', '(GMT+0800) Australia/Perth'), ('Australia/Sydney', '(GMT+1000) Australia/Sydney'), ('Canada/Atlantic', '(GMT-0300) Canada/Atlantic'), ('Canada/Central', '(GMT-0500) Canada/Central'), ('Canada/Eastern', '(GMT-0400) Canada/Eastern'), ('Canada/Mountain', '(GMT-0600) Canada/Mountain'), ('Canada/Newfoundland', '(GMT-0230) Canada/Newfoundland'), ('Canada/Pacific', '(GMT-0700) Canada/Pacific'), ('Europe/Amsterdam', '(GMT+0200) Europe/Amsterdam'), ('Europe/Andorra', '(GMT+0200) Europe/Andorra'), ('Europe/Astrakhan', '(GMT+0400) Europe/Astrakhan'), ('Europe/Athens', '(GMT+0300) Europe/Athens'), ('Europe/Belgrade', '(GMT+0200) Europe/Belgrade'), ('Europe/Berlin', '(GMT+0200) Europe/Berlin'), ('Europe/Bratislava', '(GMT+0200) Europe/Bratislava'), ('Europe/Brussels', '(GMT+0200) Europe/Brussels'), ('Europe/Bucharest', '(GMT+0300) Europe/Bucharest'), ('Europe/Budapest', '(GMT+0200) Europe/Budapest'), ('Europe/Busingen', '(GMT+0200) Europe/Busingen'), ('Europe/Chisinau', '(GMT+0300) Europe/Chisinau'), ('Europe/Copenhagen', '(GMT+0200) Europe/Copenhagen'), ('Europe/Dublin', '(GMT+0100) Europe/Dublin'), ('Europe/Gibraltar', '(GMT+0200) Europe/Gibraltar'), ('Europe/Guernsey', '(GMT+0100) Europe/Guernsey'), ('Europe/Helsinki', '(GMT+0300) Europe/Helsinki'), ('Europe/Isle_of_Man', '(GMT+0100) Europe/Isle_of_Man'), ('Europe/Istanbul', '(GMT+0300) 
Europe/Istanbul'), ('Europe/Jersey', '(GMT+0100) Europe/Jersey'), ('Europe/Kaliningrad', '(GMT+0200) Europe/Kaliningrad'), ('Europe/Kiev', '(GMT+0300) Europe/Kiev'), ('Europe/Kirov', '(GMT+0300) Europe/Kirov'), ('Europe/Lisbon', '(GMT+0100) Europe/Lisbon'), ('Europe/Ljubljana', '(GMT+0200) Europe/Ljubljana'), ('Europe/London', '(GMT+0100) Europe/London'), ('Europe/Luxembourg', '(GMT+0200) Europe/Luxembourg'), ('Europe/Madrid', '(GMT+0200) Europe/Madrid'), ('Europe/Malta', '(GMT+0200) Europe/Malta'), ('Europe/Mariehamn', '(GMT+0300) Europe/Mariehamn'), ('Europe/Minsk', '(GMT+0300) Europe/Minsk'), ('Europe/Monaco', '(GMT+0200) Europe/Monaco'), ('Europe/Moscow', '(GMT+0300) Europe/Moscow'), ('Europe/Oslo', '(GMT+0200) Europe/Oslo'), ('Europe/Paris', '(GMT+0200) Europe/Paris'), ('Europe/Podgorica', '(GMT+0200) Europe/Podgorica'), ('Europe/Prague', '(GMT+0200) Europe/Prague'), ('Europe/Riga', '(GMT+0300) Europe/Riga'), ('Europe/Rome', '(GMT+0200) Europe/Rome'), ('Europe/Samara', '(GMT+0400) Europe/Samara'), ('Europe/San_Marino', '(GMT+0200) Europe/San_Marino'), ('Europe/Sarajevo', '(GMT+0200) Europe/Sarajevo'), ('Europe/Saratov', '(GMT+0400) Europe/Saratov'), ('Europe/Simferopol', '(GMT+0300) Europe/Simferopol'), ('Europe/Skopje', '(GMT+0200) Europe/Skopje'), ('Europe/Sofia', '(GMT+0300) Europe/Sofia'), ('Europe/Stockholm', '(GMT+0200) Europe/Stockholm'), ('Europe/Tallinn', '(GMT+0300) Europe/Tallinn'), ('Europe/Tirane', '(GMT+0200) Europe/Tirane'), ('Europe/Ulyanovsk', '(GMT+0400) Europe/Ulyanovsk'), ('Europe/Uzhgorod', '(GMT+0300) Europe/Uzhgorod'), ('Europe/Vaduz', '(GMT+0200) Europe/Vaduz'), ('Europe/Vatican', '(GMT+0200) Europe/Vatican'), ('Europe/Vienna', '(GMT+0200) Europe/Vienna'), ('Europe/Vilnius', '(GMT+0300) Europe/Vilnius'), ('Europe/Volgograd', '(GMT+0300) Europe/Volgograd'), ('Europe/Warsaw', '(GMT+0200) Europe/Warsaw'), ('Europe/Zagreb', '(GMT+0200) Europe/Zagreb'), ('Europe/Zaporozhye', '(GMT+0300) Europe/Zaporozhye'), ('Europe/Zurich', '(GMT+0200) Europe/Zurich'), ('GMT', '(GMT+0000) GMT'), ('Indian/Antananarivo', '(GMT+0300) Indian/Antananarivo'), ('Indian/Chagos', '(GMT+0600) Indian/Chagos'), ('Indian/Christmas', '(GMT+0700) Indian/Christmas'), ('Indian/Cocos', '(GMT+0630) Indian/Cocos'), ('Indian/Comoro', '(GMT+0300) Indian/Comoro'), ('Indian/Kerguelen', '(GMT+0500) Indian/Kerguelen'), ('Indian/Mahe', '(GMT+0400) Indian/Mahe'), ('Indian/Maldives', '(GMT+0500) Indian/Maldives'), ('Indian/Mauritius', '(GMT+0400) Indian/Mauritius'), ('Indian/Mayotte', '(GMT+0300) Indian/Mayotte'), ('Indian/Reunion', '(GMT+0400) Indian/Reunion'), ('Pacific/Apia', '(GMT+1300) Pacific/Apia'), ('Pacific/Auckland', '(GMT+1200) Pacific/Auckland'), ('Pacific/Bougainville', '(GMT+1100) Pacific/Bougainville'), ('Pacific/Chatham', '(GMT+1245) Pacific/Chatham'), ('Pacific/Chuuk', '(GMT+1000) Pacific/Chuuk'), ('Pacific/Easter', '(GMT-0600) Pacific/Easter'), ('Pacific/Efate', '(GMT+1100) Pacific/Efate'), ('Pacific/Enderbury', '(GMT+1300) Pacific/Enderbury'), ('Pacific/Fakaofo', '(GMT+1300) Pacific/Fakaofo'), ('Pacific/Fiji', '(GMT+1200) Pacific/Fiji'), ('Pacific/Funafuti', '(GMT+1200) Pacific/Funafuti'), ('Pacific/Galapagos', '(GMT-0600) Pacific/Galapagos'), ('Pacific/Gambier', '(GMT-0900) Pacific/Gambier'), ('Pacific/Guadalcanal', '(GMT+1100) Pacific/Guadalcanal'), ('Pacific/Guam', '(GMT+1000) Pacific/Guam'), ('Pacific/Honolulu', '(GMT-1000) Pacific/Honolulu'), ('Pacific/Kiritimati', '(GMT+1400) Pacific/Kiritimati'), ('Pacific/Kosrae', '(GMT+1100) Pacific/Kosrae'), ('Pacific/Kwajalein', '(GMT+1200) 
Pacific/Kwajalein'), ('Pacific/Majuro', '(GMT+1200) Pacific/Majuro'), ('Pacific/Marquesas', '(GMT-0930) Pacific/Marquesas'), ('Pacific/Midway', '(GMT-1100) Pacific/Midway'), ('Pacific/Nauru', '(GMT+1200) Pacific/Nauru'), ('Pacific/Niue', '(GMT-1100) Pacific/Niue'), ('Pacific/Norfolk', '(GMT+1100) Pacific/Norfolk'), ('Pacific/Noumea', '(GMT+1100) Pacific/Noumea'), ('Pacific/Pago_Pago', '(GMT-1100) Pacific/Pago_Pago'), ('Pacific/Palau', '(GMT+0900) Pacific/Palau'), ('Pacific/Pitcairn', '(GMT-0800) Pacific/Pitcairn'), ('Pacific/Pohnpei', '(GMT+1100) Pacific/Pohnpei'), ('Pacific/Port_Moresby', '(GMT+1000) Pacific/Port_Moresby'), ('Pacific/Rarotonga', '(GMT-1000) Pacific/Rarotonga'), ('Pacific/Saipan', '(GMT+1000) Pacific/Saipan'), ('Pacific/Tahiti', '(GMT-1000) Pacific/Tahiti'), ('Pacific/Tarawa', '(GMT+1200) Pacific/Tarawa'), ('Pacific/Tongatapu', '(GMT+1300) Pacific/Tongatapu'), ('Pacific/Wake', '(GMT+1200) Pacific/Wake'), ('Pacific/Wallis', '(GMT+1200) Pacific/Wallis'), ('US/Alaska', '(GMT-0800) US/Alaska'), ('US/Arizona', '(GMT-0700) US/Arizona'), ('US/Central', '(GMT-0500) US/Central'), ('US/Eastern', '(GMT-0400) US/Eastern'), ('US/Hawaii', '(GMT-1000) US/Hawaii'), ('US/Mountain', '(GMT-0600) US/Mountain'), ('US/Pacific', '(GMT-0700) US/Pacific'), ('UTC', '(GMT+0000) UTC')], default='America/New_York', max_length=100)), - ('secret_token', models.CharField(blank=True, max_length=12, null=True)), - ('stripe_4_digits', models.CharField(blank=True, max_length=4, null=True)), - ('stripe_id', models.CharField(blank=True, max_length=24, null=True)), - ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='profile', to=settings.AUTH_USER_MODEL)), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("is_premium", models.BooleanField(default=False)), + ("premium_expire", models.DateTimeField(blank=True, null=True)), + ("send_emails", models.BooleanField(default=True)), + ("preferences", models.TextField(default="{}")), + ("view_settings", models.TextField(default="{}")), + ("collapsed_folders", models.TextField(default="[]")), + ("feed_pane_size", models.IntegerField(default=242)), + ("tutorial_finished", models.BooleanField(default=False)), + ("hide_getting_started", models.NullBooleanField(default=False)), + ("has_setup_feeds", models.NullBooleanField(default=False)), + ("has_found_friends", models.NullBooleanField(default=False)), + ("has_trained_intelligence", models.NullBooleanField(default=False)), + ("last_seen_on", models.DateTimeField(default=datetime.datetime.now)), + ("last_seen_ip", models.CharField(blank=True, max_length=50, null=True)), + ("dashboard_date", models.DateTimeField(default=datetime.datetime.now)), + ( + "timezone", + vendor.timezones.fields.TimeZoneField( + choices=[ + ("Africa/Abidjan", "(GMT+0000) Africa/Abidjan"), + ("Africa/Accra", "(GMT+0000) Africa/Accra"), + ("Africa/Addis_Ababa", "(GMT+0300) Africa/Addis_Ababa"), + ("Africa/Algiers", "(GMT+0100) Africa/Algiers"), + ("Africa/Asmara", "(GMT+0300) Africa/Asmara"), + ("Africa/Bamako", "(GMT+0000) Africa/Bamako"), + ("Africa/Bangui", "(GMT+0100) Africa/Bangui"), + ("Africa/Banjul", "(GMT+0000) Africa/Banjul"), + ("Africa/Bissau", "(GMT+0000) Africa/Bissau"), + ("Africa/Blantyre", "(GMT+0200) Africa/Blantyre"), + ("Africa/Brazzaville", "(GMT+0100) Africa/Brazzaville"), + ("Africa/Bujumbura", "(GMT+0200) Africa/Bujumbura"), + ("Africa/Cairo", "(GMT+0200) Africa/Cairo"), + ("Africa/Casablanca", "(GMT+0100) Africa/Casablanca"), + 
("Africa/Ceuta", "(GMT+0200) Africa/Ceuta"), + ("Africa/Conakry", "(GMT+0000) Africa/Conakry"), + ("Africa/Dakar", "(GMT+0000) Africa/Dakar"), + ("Africa/Dar_es_Salaam", "(GMT+0300) Africa/Dar_es_Salaam"), + ("Africa/Djibouti", "(GMT+0300) Africa/Djibouti"), + ("Africa/Douala", "(GMT+0100) Africa/Douala"), + ("Africa/El_Aaiun", "(GMT+0100) Africa/El_Aaiun"), + ("Africa/Freetown", "(GMT+0000) Africa/Freetown"), + ("Africa/Gaborone", "(GMT+0200) Africa/Gaborone"), + ("Africa/Harare", "(GMT+0200) Africa/Harare"), + ("Africa/Johannesburg", "(GMT+0200) Africa/Johannesburg"), + ("Africa/Juba", "(GMT+0300) Africa/Juba"), + ("Africa/Kampala", "(GMT+0300) Africa/Kampala"), + ("Africa/Khartoum", "(GMT+0200) Africa/Khartoum"), + ("Africa/Kigali", "(GMT+0200) Africa/Kigali"), + ("Africa/Kinshasa", "(GMT+0100) Africa/Kinshasa"), + ("Africa/Lagos", "(GMT+0100) Africa/Lagos"), + ("Africa/Libreville", "(GMT+0100) Africa/Libreville"), + ("Africa/Lome", "(GMT+0000) Africa/Lome"), + ("Africa/Luanda", "(GMT+0100) Africa/Luanda"), + ("Africa/Lubumbashi", "(GMT+0200) Africa/Lubumbashi"), + ("Africa/Lusaka", "(GMT+0200) Africa/Lusaka"), + ("Africa/Malabo", "(GMT+0100) Africa/Malabo"), + ("Africa/Maputo", "(GMT+0200) Africa/Maputo"), + ("Africa/Maseru", "(GMT+0200) Africa/Maseru"), + ("Africa/Mbabane", "(GMT+0200) Africa/Mbabane"), + ("Africa/Mogadishu", "(GMT+0300) Africa/Mogadishu"), + ("Africa/Monrovia", "(GMT+0000) Africa/Monrovia"), + ("Africa/Nairobi", "(GMT+0300) Africa/Nairobi"), + ("Africa/Ndjamena", "(GMT+0100) Africa/Ndjamena"), + ("Africa/Niamey", "(GMT+0100) Africa/Niamey"), + ("Africa/Nouakchott", "(GMT+0000) Africa/Nouakchott"), + ("Africa/Ouagadougou", "(GMT+0000) Africa/Ouagadougou"), + ("Africa/Porto-Novo", "(GMT+0100) Africa/Porto-Novo"), + ("Africa/Sao_Tome", "(GMT+0100) Africa/Sao_Tome"), + ("Africa/Tripoli", "(GMT+0200) Africa/Tripoli"), + ("Africa/Tunis", "(GMT+0100) Africa/Tunis"), + ("Africa/Windhoek", "(GMT+0200) Africa/Windhoek"), + ("America/Adak", "(GMT-0900) America/Adak"), + ("America/Anchorage", "(GMT-0800) America/Anchorage"), + ("America/Anguilla", "(GMT-0400) America/Anguilla"), + ("America/Antigua", "(GMT-0400) America/Antigua"), + ("America/Araguaina", "(GMT-0300) America/Araguaina"), + ("America/Argentina/Buenos_Aires", "(GMT-0300) America/Argentina/Buenos_Aires"), + ("America/Argentina/Catamarca", "(GMT-0300) America/Argentina/Catamarca"), + ("America/Argentina/Cordoba", "(GMT-0300) America/Argentina/Cordoba"), + ("America/Argentina/Jujuy", "(GMT-0300) America/Argentina/Jujuy"), + ("America/Argentina/La_Rioja", "(GMT-0300) America/Argentina/La_Rioja"), + ("America/Argentina/Mendoza", "(GMT-0300) America/Argentina/Mendoza"), + ("America/Argentina/Rio_Gallegos", "(GMT-0300) America/Argentina/Rio_Gallegos"), + ("America/Argentina/Salta", "(GMT-0300) America/Argentina/Salta"), + ("America/Argentina/San_Juan", "(GMT-0300) America/Argentina/San_Juan"), + ("America/Argentina/San_Luis", "(GMT-0300) America/Argentina/San_Luis"), + ("America/Argentina/Tucuman", "(GMT-0300) America/Argentina/Tucuman"), + ("America/Argentina/Ushuaia", "(GMT-0300) America/Argentina/Ushuaia"), + ("America/Aruba", "(GMT-0400) America/Aruba"), + ("America/Asuncion", "(GMT-0400) America/Asuncion"), + ("America/Atikokan", "(GMT-0500) America/Atikokan"), + ("America/Bahia", "(GMT-0300) America/Bahia"), + ("America/Bahia_Banderas", "(GMT-0500) America/Bahia_Banderas"), + ("America/Barbados", "(GMT-0400) America/Barbados"), + ("America/Belem", "(GMT-0300) America/Belem"), + ("America/Belize", "(GMT-0600) 
America/Belize"), + ("America/Blanc-Sablon", "(GMT-0400) America/Blanc-Sablon"), + ("America/Boa_Vista", "(GMT-0400) America/Boa_Vista"), + ("America/Bogota", "(GMT-0500) America/Bogota"), + ("America/Boise", "(GMT-0600) America/Boise"), + ("America/Cambridge_Bay", "(GMT-0600) America/Cambridge_Bay"), + ("America/Campo_Grande", "(GMT-0400) America/Campo_Grande"), + ("America/Cancun", "(GMT-0500) America/Cancun"), + ("America/Caracas", "(GMT-0400) America/Caracas"), + ("America/Cayenne", "(GMT-0300) America/Cayenne"), + ("America/Cayman", "(GMT-0500) America/Cayman"), + ("America/Chicago", "(GMT-0500) America/Chicago"), + ("America/Chihuahua", "(GMT-0600) America/Chihuahua"), + ("America/Costa_Rica", "(GMT-0600) America/Costa_Rica"), + ("America/Creston", "(GMT-0700) America/Creston"), + ("America/Cuiaba", "(GMT-0400) America/Cuiaba"), + ("America/Curacao", "(GMT-0400) America/Curacao"), + ("America/Danmarkshavn", "(GMT+0000) America/Danmarkshavn"), + ("America/Dawson", "(GMT-0700) America/Dawson"), + ("America/Dawson_Creek", "(GMT-0700) America/Dawson_Creek"), + ("America/Denver", "(GMT-0600) America/Denver"), + ("America/Detroit", "(GMT-0400) America/Detroit"), + ("America/Dominica", "(GMT-0400) America/Dominica"), + ("America/Edmonton", "(GMT-0600) America/Edmonton"), + ("America/Eirunepe", "(GMT-0500) America/Eirunepe"), + ("America/El_Salvador", "(GMT-0600) America/El_Salvador"), + ("America/Fort_Nelson", "(GMT-0700) America/Fort_Nelson"), + ("America/Fortaleza", "(GMT-0300) America/Fortaleza"), + ("America/Glace_Bay", "(GMT-0300) America/Glace_Bay"), + ("America/Godthab", "(GMT-0200) America/Godthab"), + ("America/Goose_Bay", "(GMT-0300) America/Goose_Bay"), + ("America/Grand_Turk", "(GMT-0400) America/Grand_Turk"), + ("America/Grenada", "(GMT-0400) America/Grenada"), + ("America/Guadeloupe", "(GMT-0400) America/Guadeloupe"), + ("America/Guatemala", "(GMT-0600) America/Guatemala"), + ("America/Guayaquil", "(GMT-0500) America/Guayaquil"), + ("America/Guyana", "(GMT-0400) America/Guyana"), + ("America/Halifax", "(GMT-0300) America/Halifax"), + ("America/Havana", "(GMT-0400) America/Havana"), + ("America/Hermosillo", "(GMT-0700) America/Hermosillo"), + ("America/Indiana/Indianapolis", "(GMT-0400) America/Indiana/Indianapolis"), + ("America/Indiana/Knox", "(GMT-0500) America/Indiana/Knox"), + ("America/Indiana/Marengo", "(GMT-0400) America/Indiana/Marengo"), + ("America/Indiana/Petersburg", "(GMT-0400) America/Indiana/Petersburg"), + ("America/Indiana/Tell_City", "(GMT-0500) America/Indiana/Tell_City"), + ("America/Indiana/Vevay", "(GMT-0400) America/Indiana/Vevay"), + ("America/Indiana/Vincennes", "(GMT-0400) America/Indiana/Vincennes"), + ("America/Indiana/Winamac", "(GMT-0400) America/Indiana/Winamac"), + ("America/Inuvik", "(GMT-0600) America/Inuvik"), + ("America/Iqaluit", "(GMT-0400) America/Iqaluit"), + ("America/Jamaica", "(GMT-0500) America/Jamaica"), + ("America/Juneau", "(GMT-0800) America/Juneau"), + ("America/Kentucky/Louisville", "(GMT-0400) America/Kentucky/Louisville"), + ("America/Kentucky/Monticello", "(GMT-0400) America/Kentucky/Monticello"), + ("America/Kralendijk", "(GMT-0400) America/Kralendijk"), + ("America/La_Paz", "(GMT-0400) America/La_Paz"), + ("America/Lima", "(GMT-0500) America/Lima"), + ("America/Los_Angeles", "(GMT-0700) America/Los_Angeles"), + ("America/Lower_Princes", "(GMT-0400) America/Lower_Princes"), + ("America/Maceio", "(GMT-0300) America/Maceio"), + ("America/Managua", "(GMT-0600) America/Managua"), + ("America/Manaus", "(GMT-0400) 
America/Manaus"), + ("America/Marigot", "(GMT-0400) America/Marigot"), + ("America/Martinique", "(GMT-0400) America/Martinique"), + ("America/Matamoros", "(GMT-0500) America/Matamoros"), + ("America/Mazatlan", "(GMT-0600) America/Mazatlan"), + ("America/Menominee", "(GMT-0500) America/Menominee"), + ("America/Merida", "(GMT-0500) America/Merida"), + ("America/Metlakatla", "(GMT-0800) America/Metlakatla"), + ("America/Mexico_City", "(GMT-0500) America/Mexico_City"), + ("America/Miquelon", "(GMT-0200) America/Miquelon"), + ("America/Moncton", "(GMT-0300) America/Moncton"), + ("America/Monterrey", "(GMT-0500) America/Monterrey"), + ("America/Montevideo", "(GMT-0300) America/Montevideo"), + ("America/Montserrat", "(GMT-0400) America/Montserrat"), + ("America/Nassau", "(GMT-0400) America/Nassau"), + ("America/New_York", "(GMT-0400) America/New_York"), + ("America/Nipigon", "(GMT-0400) America/Nipigon"), + ("America/Nome", "(GMT-0800) America/Nome"), + ("America/Noronha", "(GMT-0200) America/Noronha"), + ("America/North_Dakota/Beulah", "(GMT-0500) America/North_Dakota/Beulah"), + ("America/North_Dakota/Center", "(GMT-0500) America/North_Dakota/Center"), + ("America/North_Dakota/New_Salem", "(GMT-0500) America/North_Dakota/New_Salem"), + ("America/Ojinaga", "(GMT-0600) America/Ojinaga"), + ("America/Panama", "(GMT-0500) America/Panama"), + ("America/Pangnirtung", "(GMT-0400) America/Pangnirtung"), + ("America/Paramaribo", "(GMT-0300) America/Paramaribo"), + ("America/Phoenix", "(GMT-0700) America/Phoenix"), + ("America/Port-au-Prince", "(GMT-0400) America/Port-au-Prince"), + ("America/Port_of_Spain", "(GMT-0400) America/Port_of_Spain"), + ("America/Porto_Velho", "(GMT-0400) America/Porto_Velho"), + ("America/Puerto_Rico", "(GMT-0400) America/Puerto_Rico"), + ("America/Punta_Arenas", "(GMT-0300) America/Punta_Arenas"), + ("America/Rainy_River", "(GMT-0500) America/Rainy_River"), + ("America/Rankin_Inlet", "(GMT-0500) America/Rankin_Inlet"), + ("America/Recife", "(GMT-0300) America/Recife"), + ("America/Regina", "(GMT-0600) America/Regina"), + ("America/Resolute", "(GMT-0500) America/Resolute"), + ("America/Rio_Branco", "(GMT-0500) America/Rio_Branco"), + ("America/Santarem", "(GMT-0300) America/Santarem"), + ("America/Santiago", "(GMT-0400) America/Santiago"), + ("America/Santo_Domingo", "(GMT-0400) America/Santo_Domingo"), + ("America/Sao_Paulo", "(GMT-0300) America/Sao_Paulo"), + ("America/Scoresbysund", "(GMT+0000) America/Scoresbysund"), + ("America/Sitka", "(GMT-0800) America/Sitka"), + ("America/St_Barthelemy", "(GMT-0400) America/St_Barthelemy"), + ("America/St_Johns", "(GMT-0230) America/St_Johns"), + ("America/St_Kitts", "(GMT-0400) America/St_Kitts"), + ("America/St_Lucia", "(GMT-0400) America/St_Lucia"), + ("America/St_Thomas", "(GMT-0400) America/St_Thomas"), + ("America/St_Vincent", "(GMT-0400) America/St_Vincent"), + ("America/Swift_Current", "(GMT-0600) America/Swift_Current"), + ("America/Tegucigalpa", "(GMT-0600) America/Tegucigalpa"), + ("America/Thule", "(GMT-0300) America/Thule"), + ("America/Thunder_Bay", "(GMT-0400) America/Thunder_Bay"), + ("America/Tijuana", "(GMT-0700) America/Tijuana"), + ("America/Toronto", "(GMT-0400) America/Toronto"), + ("America/Tortola", "(GMT-0400) America/Tortola"), + ("America/Vancouver", "(GMT-0700) America/Vancouver"), + ("America/Whitehorse", "(GMT-0700) America/Whitehorse"), + ("America/Winnipeg", "(GMT-0500) America/Winnipeg"), + ("America/Yakutat", "(GMT-0800) America/Yakutat"), + ("America/Yellowknife", "(GMT-0600) America/Yellowknife"), + 
("Antarctica/Casey", "(GMT+1100) Antarctica/Casey"), + ("Antarctica/Davis", "(GMT+0700) Antarctica/Davis"), + ("Antarctica/DumontDUrville", "(GMT+1000) Antarctica/DumontDUrville"), + ("Antarctica/Macquarie", "(GMT+1100) Antarctica/Macquarie"), + ("Antarctica/Mawson", "(GMT+0500) Antarctica/Mawson"), + ("Antarctica/McMurdo", "(GMT+1200) Antarctica/McMurdo"), + ("Antarctica/Palmer", "(GMT-0300) Antarctica/Palmer"), + ("Antarctica/Rothera", "(GMT-0300) Antarctica/Rothera"), + ("Antarctica/Syowa", "(GMT+0300) Antarctica/Syowa"), + ("Antarctica/Troll", "(GMT+0200) Antarctica/Troll"), + ("Antarctica/Vostok", "(GMT+0600) Antarctica/Vostok"), + ("Arctic/Longyearbyen", "(GMT+0200) Arctic/Longyearbyen"), + ("Asia/Aden", "(GMT+0300) Asia/Aden"), + ("Asia/Almaty", "(GMT+0600) Asia/Almaty"), + ("Asia/Amman", "(GMT+0300) Asia/Amman"), + ("Asia/Anadyr", "(GMT+1200) Asia/Anadyr"), + ("Asia/Aqtau", "(GMT+0500) Asia/Aqtau"), + ("Asia/Aqtobe", "(GMT+0500) Asia/Aqtobe"), + ("Asia/Ashgabat", "(GMT+0500) Asia/Ashgabat"), + ("Asia/Atyrau", "(GMT+0500) Asia/Atyrau"), + ("Asia/Baghdad", "(GMT+0300) Asia/Baghdad"), + ("Asia/Bahrain", "(GMT+0300) Asia/Bahrain"), + ("Asia/Baku", "(GMT+0400) Asia/Baku"), + ("Asia/Bangkok", "(GMT+0700) Asia/Bangkok"), + ("Asia/Barnaul", "(GMT+0700) Asia/Barnaul"), + ("Asia/Beirut", "(GMT+0300) Asia/Beirut"), + ("Asia/Bishkek", "(GMT+0600) Asia/Bishkek"), + ("Asia/Brunei", "(GMT+0800) Asia/Brunei"), + ("Asia/Chita", "(GMT+0900) Asia/Chita"), + ("Asia/Choibalsan", "(GMT+0800) Asia/Choibalsan"), + ("Asia/Colombo", "(GMT+0530) Asia/Colombo"), + ("Asia/Damascus", "(GMT+0300) Asia/Damascus"), + ("Asia/Dhaka", "(GMT+0600) Asia/Dhaka"), + ("Asia/Dili", "(GMT+0900) Asia/Dili"), + ("Asia/Dubai", "(GMT+0400) Asia/Dubai"), + ("Asia/Dushanbe", "(GMT+0500) Asia/Dushanbe"), + ("Asia/Famagusta", "(GMT+0300) Asia/Famagusta"), + ("Asia/Gaza", "(GMT+0300) Asia/Gaza"), + ("Asia/Hebron", "(GMT+0300) Asia/Hebron"), + ("Asia/Ho_Chi_Minh", "(GMT+0700) Asia/Ho_Chi_Minh"), + ("Asia/Hong_Kong", "(GMT+0800) Asia/Hong_Kong"), + ("Asia/Hovd", "(GMT+0700) Asia/Hovd"), + ("Asia/Irkutsk", "(GMT+0800) Asia/Irkutsk"), + ("Asia/Jakarta", "(GMT+0700) Asia/Jakarta"), + ("Asia/Jayapura", "(GMT+0900) Asia/Jayapura"), + ("Asia/Jerusalem", "(GMT+0300) Asia/Jerusalem"), + ("Asia/Kabul", "(GMT+0430) Asia/Kabul"), + ("Asia/Kamchatka", "(GMT+1200) Asia/Kamchatka"), + ("Asia/Karachi", "(GMT+0500) Asia/Karachi"), + ("Asia/Kathmandu", "(GMT+0545) Asia/Kathmandu"), + ("Asia/Khandyga", "(GMT+0900) Asia/Khandyga"), + ("Asia/Kolkata", "(GMT+0530) Asia/Kolkata"), + ("Asia/Krasnoyarsk", "(GMT+0700) Asia/Krasnoyarsk"), + ("Asia/Kuala_Lumpur", "(GMT+0800) Asia/Kuala_Lumpur"), + ("Asia/Kuching", "(GMT+0800) Asia/Kuching"), + ("Asia/Kuwait", "(GMT+0300) Asia/Kuwait"), + ("Asia/Macau", "(GMT+0800) Asia/Macau"), + ("Asia/Magadan", "(GMT+1100) Asia/Magadan"), + ("Asia/Makassar", "(GMT+0800) Asia/Makassar"), + ("Asia/Manila", "(GMT+0800) Asia/Manila"), + ("Asia/Muscat", "(GMT+0400) Asia/Muscat"), + ("Asia/Nicosia", "(GMT+0300) Asia/Nicosia"), + ("Asia/Novokuznetsk", "(GMT+0700) Asia/Novokuznetsk"), + ("Asia/Novosibirsk", "(GMT+0700) Asia/Novosibirsk"), + ("Asia/Omsk", "(GMT+0600) Asia/Omsk"), + ("Asia/Oral", "(GMT+0500) Asia/Oral"), + ("Asia/Phnom_Penh", "(GMT+0700) Asia/Phnom_Penh"), + ("Asia/Pontianak", "(GMT+0700) Asia/Pontianak"), + ("Asia/Pyongyang", "(GMT+0830) Asia/Pyongyang"), + ("Asia/Qatar", "(GMT+0300) Asia/Qatar"), + ("Asia/Qyzylorda", "(GMT+0600) Asia/Qyzylorda"), + ("Asia/Riyadh", "(GMT+0300) Asia/Riyadh"), + ("Asia/Sakhalin", 
"(GMT+1100) Asia/Sakhalin"), + ("Asia/Samarkand", "(GMT+0500) Asia/Samarkand"), + ("Asia/Seoul", "(GMT+0900) Asia/Seoul"), + ("Asia/Shanghai", "(GMT+0800) Asia/Shanghai"), + ("Asia/Singapore", "(GMT+0800) Asia/Singapore"), + ("Asia/Srednekolymsk", "(GMT+1100) Asia/Srednekolymsk"), + ("Asia/Taipei", "(GMT+0800) Asia/Taipei"), + ("Asia/Tashkent", "(GMT+0500) Asia/Tashkent"), + ("Asia/Tbilisi", "(GMT+0400) Asia/Tbilisi"), + ("Asia/Tehran", "(GMT+0430) Asia/Tehran"), + ("Asia/Thimphu", "(GMT+0600) Asia/Thimphu"), + ("Asia/Tokyo", "(GMT+0900) Asia/Tokyo"), + ("Asia/Tomsk", "(GMT+0700) Asia/Tomsk"), + ("Asia/Ulaanbaatar", "(GMT+0800) Asia/Ulaanbaatar"), + ("Asia/Urumqi", "(GMT+0600) Asia/Urumqi"), + ("Asia/Ust-Nera", "(GMT+1000) Asia/Ust-Nera"), + ("Asia/Vientiane", "(GMT+0700) Asia/Vientiane"), + ("Asia/Vladivostok", "(GMT+1000) Asia/Vladivostok"), + ("Asia/Yakutsk", "(GMT+0900) Asia/Yakutsk"), + ("Asia/Yangon", "(GMT+0630) Asia/Yangon"), + ("Asia/Yekaterinburg", "(GMT+0500) Asia/Yekaterinburg"), + ("Asia/Yerevan", "(GMT+0400) Asia/Yerevan"), + ("Atlantic/Azores", "(GMT+0000) Atlantic/Azores"), + ("Atlantic/Bermuda", "(GMT-0300) Atlantic/Bermuda"), + ("Atlantic/Canary", "(GMT+0100) Atlantic/Canary"), + ("Atlantic/Cape_Verde", "(GMT-0100) Atlantic/Cape_Verde"), + ("Atlantic/Faroe", "(GMT+0100) Atlantic/Faroe"), + ("Atlantic/Madeira", "(GMT+0100) Atlantic/Madeira"), + ("Atlantic/Reykjavik", "(GMT+0000) Atlantic/Reykjavik"), + ("Atlantic/South_Georgia", "(GMT-0200) Atlantic/South_Georgia"), + ("Atlantic/St_Helena", "(GMT+0000) Atlantic/St_Helena"), + ("Atlantic/Stanley", "(GMT-0300) Atlantic/Stanley"), + ("Australia/Adelaide", "(GMT+0930) Australia/Adelaide"), + ("Australia/Brisbane", "(GMT+1000) Australia/Brisbane"), + ("Australia/Broken_Hill", "(GMT+0930) Australia/Broken_Hill"), + ("Australia/Currie", "(GMT+1000) Australia/Currie"), + ("Australia/Darwin", "(GMT+0930) Australia/Darwin"), + ("Australia/Eucla", "(GMT+0845) Australia/Eucla"), + ("Australia/Hobart", "(GMT+1000) Australia/Hobart"), + ("Australia/Lindeman", "(GMT+1000) Australia/Lindeman"), + ("Australia/Lord_Howe", "(GMT+1030) Australia/Lord_Howe"), + ("Australia/Melbourne", "(GMT+1000) Australia/Melbourne"), + ("Australia/Perth", "(GMT+0800) Australia/Perth"), + ("Australia/Sydney", "(GMT+1000) Australia/Sydney"), + ("Canada/Atlantic", "(GMT-0300) Canada/Atlantic"), + ("Canada/Central", "(GMT-0500) Canada/Central"), + ("Canada/Eastern", "(GMT-0400) Canada/Eastern"), + ("Canada/Mountain", "(GMT-0600) Canada/Mountain"), + ("Canada/Newfoundland", "(GMT-0230) Canada/Newfoundland"), + ("Canada/Pacific", "(GMT-0700) Canada/Pacific"), + ("Europe/Amsterdam", "(GMT+0200) Europe/Amsterdam"), + ("Europe/Andorra", "(GMT+0200) Europe/Andorra"), + ("Europe/Astrakhan", "(GMT+0400) Europe/Astrakhan"), + ("Europe/Athens", "(GMT+0300) Europe/Athens"), + ("Europe/Belgrade", "(GMT+0200) Europe/Belgrade"), + ("Europe/Berlin", "(GMT+0200) Europe/Berlin"), + ("Europe/Bratislava", "(GMT+0200) Europe/Bratislava"), + ("Europe/Brussels", "(GMT+0200) Europe/Brussels"), + ("Europe/Bucharest", "(GMT+0300) Europe/Bucharest"), + ("Europe/Budapest", "(GMT+0200) Europe/Budapest"), + ("Europe/Busingen", "(GMT+0200) Europe/Busingen"), + ("Europe/Chisinau", "(GMT+0300) Europe/Chisinau"), + ("Europe/Copenhagen", "(GMT+0200) Europe/Copenhagen"), + ("Europe/Dublin", "(GMT+0100) Europe/Dublin"), + ("Europe/Gibraltar", "(GMT+0200) Europe/Gibraltar"), + ("Europe/Guernsey", "(GMT+0100) Europe/Guernsey"), + ("Europe/Helsinki", "(GMT+0300) Europe/Helsinki"), + 
("Europe/Isle_of_Man", "(GMT+0100) Europe/Isle_of_Man"), + ("Europe/Istanbul", "(GMT+0300) Europe/Istanbul"), + ("Europe/Jersey", "(GMT+0100) Europe/Jersey"), + ("Europe/Kaliningrad", "(GMT+0200) Europe/Kaliningrad"), + ("Europe/Kiev", "(GMT+0300) Europe/Kiev"), + ("Europe/Kirov", "(GMT+0300) Europe/Kirov"), + ("Europe/Lisbon", "(GMT+0100) Europe/Lisbon"), + ("Europe/Ljubljana", "(GMT+0200) Europe/Ljubljana"), + ("Europe/London", "(GMT+0100) Europe/London"), + ("Europe/Luxembourg", "(GMT+0200) Europe/Luxembourg"), + ("Europe/Madrid", "(GMT+0200) Europe/Madrid"), + ("Europe/Malta", "(GMT+0200) Europe/Malta"), + ("Europe/Mariehamn", "(GMT+0300) Europe/Mariehamn"), + ("Europe/Minsk", "(GMT+0300) Europe/Minsk"), + ("Europe/Monaco", "(GMT+0200) Europe/Monaco"), + ("Europe/Moscow", "(GMT+0300) Europe/Moscow"), + ("Europe/Oslo", "(GMT+0200) Europe/Oslo"), + ("Europe/Paris", "(GMT+0200) Europe/Paris"), + ("Europe/Podgorica", "(GMT+0200) Europe/Podgorica"), + ("Europe/Prague", "(GMT+0200) Europe/Prague"), + ("Europe/Riga", "(GMT+0300) Europe/Riga"), + ("Europe/Rome", "(GMT+0200) Europe/Rome"), + ("Europe/Samara", "(GMT+0400) Europe/Samara"), + ("Europe/San_Marino", "(GMT+0200) Europe/San_Marino"), + ("Europe/Sarajevo", "(GMT+0200) Europe/Sarajevo"), + ("Europe/Saratov", "(GMT+0400) Europe/Saratov"), + ("Europe/Simferopol", "(GMT+0300) Europe/Simferopol"), + ("Europe/Skopje", "(GMT+0200) Europe/Skopje"), + ("Europe/Sofia", "(GMT+0300) Europe/Sofia"), + ("Europe/Stockholm", "(GMT+0200) Europe/Stockholm"), + ("Europe/Tallinn", "(GMT+0300) Europe/Tallinn"), + ("Europe/Tirane", "(GMT+0200) Europe/Tirane"), + ("Europe/Ulyanovsk", "(GMT+0400) Europe/Ulyanovsk"), + ("Europe/Uzhgorod", "(GMT+0300) Europe/Uzhgorod"), + ("Europe/Vaduz", "(GMT+0200) Europe/Vaduz"), + ("Europe/Vatican", "(GMT+0200) Europe/Vatican"), + ("Europe/Vienna", "(GMT+0200) Europe/Vienna"), + ("Europe/Vilnius", "(GMT+0300) Europe/Vilnius"), + ("Europe/Volgograd", "(GMT+0300) Europe/Volgograd"), + ("Europe/Warsaw", "(GMT+0200) Europe/Warsaw"), + ("Europe/Zagreb", "(GMT+0200) Europe/Zagreb"), + ("Europe/Zaporozhye", "(GMT+0300) Europe/Zaporozhye"), + ("Europe/Zurich", "(GMT+0200) Europe/Zurich"), + ("GMT", "(GMT+0000) GMT"), + ("Indian/Antananarivo", "(GMT+0300) Indian/Antananarivo"), + ("Indian/Chagos", "(GMT+0600) Indian/Chagos"), + ("Indian/Christmas", "(GMT+0700) Indian/Christmas"), + ("Indian/Cocos", "(GMT+0630) Indian/Cocos"), + ("Indian/Comoro", "(GMT+0300) Indian/Comoro"), + ("Indian/Kerguelen", "(GMT+0500) Indian/Kerguelen"), + ("Indian/Mahe", "(GMT+0400) Indian/Mahe"), + ("Indian/Maldives", "(GMT+0500) Indian/Maldives"), + ("Indian/Mauritius", "(GMT+0400) Indian/Mauritius"), + ("Indian/Mayotte", "(GMT+0300) Indian/Mayotte"), + ("Indian/Reunion", "(GMT+0400) Indian/Reunion"), + ("Pacific/Apia", "(GMT+1300) Pacific/Apia"), + ("Pacific/Auckland", "(GMT+1200) Pacific/Auckland"), + ("Pacific/Bougainville", "(GMT+1100) Pacific/Bougainville"), + ("Pacific/Chatham", "(GMT+1245) Pacific/Chatham"), + ("Pacific/Chuuk", "(GMT+1000) Pacific/Chuuk"), + ("Pacific/Easter", "(GMT-0600) Pacific/Easter"), + ("Pacific/Efate", "(GMT+1100) Pacific/Efate"), + ("Pacific/Enderbury", "(GMT+1300) Pacific/Enderbury"), + ("Pacific/Fakaofo", "(GMT+1300) Pacific/Fakaofo"), + ("Pacific/Fiji", "(GMT+1200) Pacific/Fiji"), + ("Pacific/Funafuti", "(GMT+1200) Pacific/Funafuti"), + ("Pacific/Galapagos", "(GMT-0600) Pacific/Galapagos"), + ("Pacific/Gambier", "(GMT-0900) Pacific/Gambier"), + ("Pacific/Guadalcanal", "(GMT+1100) Pacific/Guadalcanal"), + ("Pacific/Guam", 
"(GMT+1000) Pacific/Guam"), + ("Pacific/Honolulu", "(GMT-1000) Pacific/Honolulu"), + ("Pacific/Kiritimati", "(GMT+1400) Pacific/Kiritimati"), + ("Pacific/Kosrae", "(GMT+1100) Pacific/Kosrae"), + ("Pacific/Kwajalein", "(GMT+1200) Pacific/Kwajalein"), + ("Pacific/Majuro", "(GMT+1200) Pacific/Majuro"), + ("Pacific/Marquesas", "(GMT-0930) Pacific/Marquesas"), + ("Pacific/Midway", "(GMT-1100) Pacific/Midway"), + ("Pacific/Nauru", "(GMT+1200) Pacific/Nauru"), + ("Pacific/Niue", "(GMT-1100) Pacific/Niue"), + ("Pacific/Norfolk", "(GMT+1100) Pacific/Norfolk"), + ("Pacific/Noumea", "(GMT+1100) Pacific/Noumea"), + ("Pacific/Pago_Pago", "(GMT-1100) Pacific/Pago_Pago"), + ("Pacific/Palau", "(GMT+0900) Pacific/Palau"), + ("Pacific/Pitcairn", "(GMT-0800) Pacific/Pitcairn"), + ("Pacific/Pohnpei", "(GMT+1100) Pacific/Pohnpei"), + ("Pacific/Port_Moresby", "(GMT+1000) Pacific/Port_Moresby"), + ("Pacific/Rarotonga", "(GMT-1000) Pacific/Rarotonga"), + ("Pacific/Saipan", "(GMT+1000) Pacific/Saipan"), + ("Pacific/Tahiti", "(GMT-1000) Pacific/Tahiti"), + ("Pacific/Tarawa", "(GMT+1200) Pacific/Tarawa"), + ("Pacific/Tongatapu", "(GMT+1300) Pacific/Tongatapu"), + ("Pacific/Wake", "(GMT+1200) Pacific/Wake"), + ("Pacific/Wallis", "(GMT+1200) Pacific/Wallis"), + ("US/Alaska", "(GMT-0800) US/Alaska"), + ("US/Arizona", "(GMT-0700) US/Arizona"), + ("US/Central", "(GMT-0500) US/Central"), + ("US/Eastern", "(GMT-0400) US/Eastern"), + ("US/Hawaii", "(GMT-1000) US/Hawaii"), + ("US/Mountain", "(GMT-0600) US/Mountain"), + ("US/Pacific", "(GMT-0700) US/Pacific"), + ("UTC", "(GMT+0000) UTC"), + ], + default="America/New_York", + max_length=100, + ), + ), + ("secret_token", models.CharField(blank=True, max_length=12, null=True)), + ("stripe_4_digits", models.CharField(blank=True, max_length=4, null=True)), + ("stripe_id", models.CharField(blank=True, max_length=24, null=True)), + ( + "user", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, + related_name="profile", + to=settings.AUTH_USER_MODEL, + ), + ), ], ), migrations.CreateModel( - name='StripeIds', + name="StripeIds", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('stripe_id', models.CharField(blank=True, max_length=24, null=True)), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='stripe_ids', to=settings.AUTH_USER_MODEL)), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("stripe_id", models.CharField(blank=True, max_length=24, null=True)), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="stripe_ids", + to=settings.AUTH_USER_MODEL, + ), + ), ], ), ] diff --git a/apps/profile/migrations/0002_auto_20200620_0803.py b/apps/profile/migrations/0002_auto_20200620_0803.py index 8214809149..945ec7d9f5 100644 --- a/apps/profile/migrations/0002_auto_20200620_0803.py +++ b/apps/profile/migrations/0002_auto_20200620_0803.py @@ -6,15 +6,19 @@ class Migration(migrations.Migration): - dependencies = [ - ('profile', '0001_initial'), + ("profile", "0001_initial"), ] operations = [ migrations.AlterField( - model_name='stripeids', - name='user', - field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='stripe_ids', to=settings.AUTH_USER_MODEL), + model_name="stripeids", + name="user", + field=models.ForeignKey( + null=True, + on_delete=django.db.models.deletion.CASCADE, + related_name="stripe_ids", + 
to=settings.AUTH_USER_MODEL, + ), ), ] diff --git a/apps/profile/migrations/0003_auto_20201005_0932.py b/apps/profile/migrations/0003_auto_20201005_0932.py index 28c8dd8a22..db8f3c7a71 100644 --- a/apps/profile/migrations/0003_auto_20201005_0932.py +++ b/apps/profile/migrations/0003_auto_20201005_0932.py @@ -5,15 +5,458 @@ class Migration(migrations.Migration): - dependencies = [ - ('profile', '0002_auto_20200620_0803'), + ("profile", "0002_auto_20200620_0803"), ] operations = [ migrations.AlterField( - model_name='profile', - name='timezone', - field=vendor.timezones.fields.TimeZoneField(choices=[('Africa/Abidjan', '(GMT+0000) Africa/Abidjan'), ('Africa/Accra', '(GMT+0000) Africa/Accra'), ('Africa/Addis_Ababa', '(GMT+0300) Africa/Addis_Ababa'), ('Africa/Algiers', '(GMT+0100) Africa/Algiers'), ('Africa/Asmara', '(GMT+0300) Africa/Asmara'), ('Africa/Bamako', '(GMT+0000) Africa/Bamako'), ('Africa/Bangui', '(GMT+0100) Africa/Bangui'), ('Africa/Banjul', '(GMT+0000) Africa/Banjul'), ('Africa/Bissau', '(GMT+0000) Africa/Bissau'), ('Africa/Blantyre', '(GMT+0200) Africa/Blantyre'), ('Africa/Brazzaville', '(GMT+0100) Africa/Brazzaville'), ('Africa/Bujumbura', '(GMT+0200) Africa/Bujumbura'), ('Africa/Cairo', '(GMT+0200) Africa/Cairo'), ('Africa/Casablanca', '(GMT+0100) Africa/Casablanca'), ('Africa/Ceuta', '(GMT+0200) Africa/Ceuta'), ('Africa/Conakry', '(GMT+0000) Africa/Conakry'), ('Africa/Dakar', '(GMT+0000) Africa/Dakar'), ('Africa/Dar_es_Salaam', '(GMT+0300) Africa/Dar_es_Salaam'), ('Africa/Djibouti', '(GMT+0300) Africa/Djibouti'), ('Africa/Douala', '(GMT+0100) Africa/Douala'), ('Africa/El_Aaiun', '(GMT+0100) Africa/El_Aaiun'), ('Africa/Freetown', '(GMT+0000) Africa/Freetown'), ('Africa/Gaborone', '(GMT+0200) Africa/Gaborone'), ('Africa/Harare', '(GMT+0200) Africa/Harare'), ('Africa/Johannesburg', '(GMT+0200) Africa/Johannesburg'), ('Africa/Juba', '(GMT+0300) Africa/Juba'), ('Africa/Kampala', '(GMT+0300) Africa/Kampala'), ('Africa/Khartoum', '(GMT+0200) Africa/Khartoum'), ('Africa/Kigali', '(GMT+0200) Africa/Kigali'), ('Africa/Kinshasa', '(GMT+0100) Africa/Kinshasa'), ('Africa/Lagos', '(GMT+0100) Africa/Lagos'), ('Africa/Libreville', '(GMT+0100) Africa/Libreville'), ('Africa/Lome', '(GMT+0000) Africa/Lome'), ('Africa/Luanda', '(GMT+0100) Africa/Luanda'), ('Africa/Lubumbashi', '(GMT+0200) Africa/Lubumbashi'), ('Africa/Lusaka', '(GMT+0200) Africa/Lusaka'), ('Africa/Malabo', '(GMT+0100) Africa/Malabo'), ('Africa/Maputo', '(GMT+0200) Africa/Maputo'), ('Africa/Maseru', '(GMT+0200) Africa/Maseru'), ('Africa/Mbabane', '(GMT+0200) Africa/Mbabane'), ('Africa/Mogadishu', '(GMT+0300) Africa/Mogadishu'), ('Africa/Monrovia', '(GMT+0000) Africa/Monrovia'), ('Africa/Nairobi', '(GMT+0300) Africa/Nairobi'), ('Africa/Ndjamena', '(GMT+0100) Africa/Ndjamena'), ('Africa/Niamey', '(GMT+0100) Africa/Niamey'), ('Africa/Nouakchott', '(GMT+0000) Africa/Nouakchott'), ('Africa/Ouagadougou', '(GMT+0000) Africa/Ouagadougou'), ('Africa/Porto-Novo', '(GMT+0100) Africa/Porto-Novo'), ('Africa/Sao_Tome', '(GMT+0100) Africa/Sao_Tome'), ('Africa/Tripoli', '(GMT+0200) Africa/Tripoli'), ('Africa/Tunis', '(GMT+0100) Africa/Tunis'), ('Africa/Windhoek', '(GMT+0200) Africa/Windhoek'), ('America/Adak', '(GMT-0900) America/Adak'), ('America/Anchorage', '(GMT-0800) America/Anchorage'), ('America/Anguilla', '(GMT-0400) America/Anguilla'), ('America/Antigua', '(GMT-0400) America/Antigua'), ('America/Araguaina', '(GMT-0300) America/Araguaina'), ('America/Argentina/Buenos_Aires', '(GMT-0300) America/Argentina/Buenos_Aires'), 
('America/Argentina/Catamarca', '(GMT-0300) America/Argentina/Catamarca'), ('America/Argentina/Cordoba', '(GMT-0300) America/Argentina/Cordoba'), ('America/Argentina/Jujuy', '(GMT-0300) America/Argentina/Jujuy'), ('America/Argentina/La_Rioja', '(GMT-0300) America/Argentina/La_Rioja'), ('America/Argentina/Mendoza', '(GMT-0300) America/Argentina/Mendoza'), ('America/Argentina/Rio_Gallegos', '(GMT-0300) America/Argentina/Rio_Gallegos'), ('America/Argentina/Salta', '(GMT-0300) America/Argentina/Salta'), ('America/Argentina/San_Juan', '(GMT-0300) America/Argentina/San_Juan'), ('America/Argentina/San_Luis', '(GMT-0300) America/Argentina/San_Luis'), ('America/Argentina/Tucuman', '(GMT-0300) America/Argentina/Tucuman'), ('America/Argentina/Ushuaia', '(GMT-0300) America/Argentina/Ushuaia'), ('America/Aruba', '(GMT-0400) America/Aruba'), ('America/Asuncion', '(GMT-0300) America/Asuncion'), ('America/Atikokan', '(GMT-0500) America/Atikokan'), ('America/Bahia', '(GMT-0300) America/Bahia'), ('America/Bahia_Banderas', '(GMT-0500) America/Bahia_Banderas'), ('America/Barbados', '(GMT-0400) America/Barbados'), ('America/Belem', '(GMT-0300) America/Belem'), ('America/Belize', '(GMT-0600) America/Belize'), ('America/Blanc-Sablon', '(GMT-0400) America/Blanc-Sablon'), ('America/Boa_Vista', '(GMT-0400) America/Boa_Vista'), ('America/Bogota', '(GMT-0500) America/Bogota'), ('America/Boise', '(GMT-0600) America/Boise'), ('America/Cambridge_Bay', '(GMT-0600) America/Cambridge_Bay'), ('America/Campo_Grande', '(GMT-0400) America/Campo_Grande'), ('America/Cancun', '(GMT-0500) America/Cancun'), ('America/Caracas', '(GMT-0400) America/Caracas'), ('America/Cayenne', '(GMT-0300) America/Cayenne'), ('America/Cayman', '(GMT-0500) America/Cayman'), ('America/Chicago', '(GMT-0500) America/Chicago'), ('America/Chihuahua', '(GMT-0600) America/Chihuahua'), ('America/Costa_Rica', '(GMT-0600) America/Costa_Rica'), ('America/Creston', '(GMT-0700) America/Creston'), ('America/Cuiaba', '(GMT-0400) America/Cuiaba'), ('America/Curacao', '(GMT-0400) America/Curacao'), ('America/Danmarkshavn', '(GMT+0000) America/Danmarkshavn'), ('America/Dawson', '(GMT-0700) America/Dawson'), ('America/Dawson_Creek', '(GMT-0700) America/Dawson_Creek'), ('America/Denver', '(GMT-0600) America/Denver'), ('America/Detroit', '(GMT-0400) America/Detroit'), ('America/Dominica', '(GMT-0400) America/Dominica'), ('America/Edmonton', '(GMT-0600) America/Edmonton'), ('America/Eirunepe', '(GMT-0500) America/Eirunepe'), ('America/El_Salvador', '(GMT-0600) America/El_Salvador'), ('America/Fort_Nelson', '(GMT-0700) America/Fort_Nelson'), ('America/Fortaleza', '(GMT-0300) America/Fortaleza'), ('America/Glace_Bay', '(GMT-0300) America/Glace_Bay'), ('America/Godthab', '(GMT-0200) America/Godthab'), ('America/Goose_Bay', '(GMT-0300) America/Goose_Bay'), ('America/Grand_Turk', '(GMT-0400) America/Grand_Turk'), ('America/Grenada', '(GMT-0400) America/Grenada'), ('America/Guadeloupe', '(GMT-0400) America/Guadeloupe'), ('America/Guatemala', '(GMT-0600) America/Guatemala'), ('America/Guayaquil', '(GMT-0500) America/Guayaquil'), ('America/Guyana', '(GMT-0400) America/Guyana'), ('America/Halifax', '(GMT-0300) America/Halifax'), ('America/Havana', '(GMT-0400) America/Havana'), ('America/Hermosillo', '(GMT-0700) America/Hermosillo'), ('America/Indiana/Indianapolis', '(GMT-0400) America/Indiana/Indianapolis'), ('America/Indiana/Knox', '(GMT-0500) America/Indiana/Knox'), ('America/Indiana/Marengo', '(GMT-0400) America/Indiana/Marengo'), ('America/Indiana/Petersburg', '(GMT-0400) 
America/Indiana/Petersburg'), ('America/Indiana/Tell_City', '(GMT-0500) America/Indiana/Tell_City'), ('America/Indiana/Vevay', '(GMT-0400) America/Indiana/Vevay'), ('America/Indiana/Vincennes', '(GMT-0400) America/Indiana/Vincennes'), ('America/Indiana/Winamac', '(GMT-0400) America/Indiana/Winamac'), ('America/Inuvik', '(GMT-0600) America/Inuvik'), ('America/Iqaluit', '(GMT-0400) America/Iqaluit'), ('America/Jamaica', '(GMT-0500) America/Jamaica'), ('America/Juneau', '(GMT-0800) America/Juneau'), ('America/Kentucky/Louisville', '(GMT-0400) America/Kentucky/Louisville'), ('America/Kentucky/Monticello', '(GMT-0400) America/Kentucky/Monticello'), ('America/Kralendijk', '(GMT-0400) America/Kralendijk'), ('America/La_Paz', '(GMT-0400) America/La_Paz'), ('America/Lima', '(GMT-0500) America/Lima'), ('America/Los_Angeles', '(GMT-0700) America/Los_Angeles'), ('America/Lower_Princes', '(GMT-0400) America/Lower_Princes'), ('America/Maceio', '(GMT-0300) America/Maceio'), ('America/Managua', '(GMT-0600) America/Managua'), ('America/Manaus', '(GMT-0400) America/Manaus'), ('America/Marigot', '(GMT-0400) America/Marigot'), ('America/Martinique', '(GMT-0400) America/Martinique'), ('America/Matamoros', '(GMT-0500) America/Matamoros'), ('America/Mazatlan', '(GMT-0600) America/Mazatlan'), ('America/Menominee', '(GMT-0500) America/Menominee'), ('America/Merida', '(GMT-0500) America/Merida'), ('America/Metlakatla', '(GMT-0800) America/Metlakatla'), ('America/Mexico_City', '(GMT-0500) America/Mexico_City'), ('America/Miquelon', '(GMT-0200) America/Miquelon'), ('America/Moncton', '(GMT-0300) America/Moncton'), ('America/Monterrey', '(GMT-0500) America/Monterrey'), ('America/Montevideo', '(GMT-0300) America/Montevideo'), ('America/Montserrat', '(GMT-0400) America/Montserrat'), ('America/Nassau', '(GMT-0400) America/Nassau'), ('America/New_York', '(GMT-0400) America/New_York'), ('America/Nipigon', '(GMT-0400) America/Nipigon'), ('America/Nome', '(GMT-0800) America/Nome'), ('America/Noronha', '(GMT-0200) America/Noronha'), ('America/North_Dakota/Beulah', '(GMT-0500) America/North_Dakota/Beulah'), ('America/North_Dakota/Center', '(GMT-0500) America/North_Dakota/Center'), ('America/North_Dakota/New_Salem', '(GMT-0500) America/North_Dakota/New_Salem'), ('America/Ojinaga', '(GMT-0600) America/Ojinaga'), ('America/Panama', '(GMT-0500) America/Panama'), ('America/Pangnirtung', '(GMT-0400) America/Pangnirtung'), ('America/Paramaribo', '(GMT-0300) America/Paramaribo'), ('America/Phoenix', '(GMT-0700) America/Phoenix'), ('America/Port-au-Prince', '(GMT-0400) America/Port-au-Prince'), ('America/Port_of_Spain', '(GMT-0400) America/Port_of_Spain'), ('America/Porto_Velho', '(GMT-0400) America/Porto_Velho'), ('America/Puerto_Rico', '(GMT-0400) America/Puerto_Rico'), ('America/Punta_Arenas', '(GMT-0300) America/Punta_Arenas'), ('America/Rainy_River', '(GMT-0500) America/Rainy_River'), ('America/Rankin_Inlet', '(GMT-0500) America/Rankin_Inlet'), ('America/Recife', '(GMT-0300) America/Recife'), ('America/Regina', '(GMT-0600) America/Regina'), ('America/Resolute', '(GMT-0500) America/Resolute'), ('America/Rio_Branco', '(GMT-0500) America/Rio_Branco'), ('America/Santarem', '(GMT-0300) America/Santarem'), ('America/Santiago', '(GMT-0300) America/Santiago'), ('America/Santo_Domingo', '(GMT-0400) America/Santo_Domingo'), ('America/Sao_Paulo', '(GMT-0300) America/Sao_Paulo'), ('America/Scoresbysund', '(GMT+0000) America/Scoresbysund'), ('America/Sitka', '(GMT-0800) America/Sitka'), ('America/St_Barthelemy', '(GMT-0400) 
America/St_Barthelemy'), ('America/St_Johns', '(GMT-0230) America/St_Johns'), ('America/St_Kitts', '(GMT-0400) America/St_Kitts'), ('America/St_Lucia', '(GMT-0400) America/St_Lucia'), ('America/St_Thomas', '(GMT-0400) America/St_Thomas'), ('America/St_Vincent', '(GMT-0400) America/St_Vincent'), ('America/Swift_Current', '(GMT-0600) America/Swift_Current'), ('America/Tegucigalpa', '(GMT-0600) America/Tegucigalpa'), ('America/Thule', '(GMT-0300) America/Thule'), ('America/Thunder_Bay', '(GMT-0400) America/Thunder_Bay'), ('America/Tijuana', '(GMT-0700) America/Tijuana'), ('America/Toronto', '(GMT-0400) America/Toronto'), ('America/Tortola', '(GMT-0400) America/Tortola'), ('America/Vancouver', '(GMT-0700) America/Vancouver'), ('America/Whitehorse', '(GMT-0700) America/Whitehorse'), ('America/Winnipeg', '(GMT-0500) America/Winnipeg'), ('America/Yakutat', '(GMT-0800) America/Yakutat'), ('America/Yellowknife', '(GMT-0600) America/Yellowknife'), ('Antarctica/Casey', '(GMT+1100) Antarctica/Casey'), ('Antarctica/Davis', '(GMT+0700) Antarctica/Davis'), ('Antarctica/DumontDUrville', '(GMT+1000) Antarctica/DumontDUrville'), ('Antarctica/Macquarie', '(GMT+1100) Antarctica/Macquarie'), ('Antarctica/Mawson', '(GMT+0500) Antarctica/Mawson'), ('Antarctica/McMurdo', '(GMT+1300) Antarctica/McMurdo'), ('Antarctica/Palmer', '(GMT-0300) Antarctica/Palmer'), ('Antarctica/Rothera', '(GMT-0300) Antarctica/Rothera'), ('Antarctica/Syowa', '(GMT+0300) Antarctica/Syowa'), ('Antarctica/Troll', '(GMT+0200) Antarctica/Troll'), ('Antarctica/Vostok', '(GMT+0600) Antarctica/Vostok'), ('Arctic/Longyearbyen', '(GMT+0200) Arctic/Longyearbyen'), ('Asia/Aden', '(GMT+0300) Asia/Aden'), ('Asia/Almaty', '(GMT+0600) Asia/Almaty'), ('Asia/Amman', '(GMT+0300) Asia/Amman'), ('Asia/Anadyr', '(GMT+1200) Asia/Anadyr'), ('Asia/Aqtau', '(GMT+0500) Asia/Aqtau'), ('Asia/Aqtobe', '(GMT+0500) Asia/Aqtobe'), ('Asia/Ashgabat', '(GMT+0500) Asia/Ashgabat'), ('Asia/Atyrau', '(GMT+0500) Asia/Atyrau'), ('Asia/Baghdad', '(GMT+0300) Asia/Baghdad'), ('Asia/Bahrain', '(GMT+0300) Asia/Bahrain'), ('Asia/Baku', '(GMT+0400) Asia/Baku'), ('Asia/Bangkok', '(GMT+0700) Asia/Bangkok'), ('Asia/Barnaul', '(GMT+0700) Asia/Barnaul'), ('Asia/Beirut', '(GMT+0300) Asia/Beirut'), ('Asia/Bishkek', '(GMT+0600) Asia/Bishkek'), ('Asia/Brunei', '(GMT+0800) Asia/Brunei'), ('Asia/Chita', '(GMT+0900) Asia/Chita'), ('Asia/Choibalsan', '(GMT+0800) Asia/Choibalsan'), ('Asia/Colombo', '(GMT+0530) Asia/Colombo'), ('Asia/Damascus', '(GMT+0300) Asia/Damascus'), ('Asia/Dhaka', '(GMT+0600) Asia/Dhaka'), ('Asia/Dili', '(GMT+0900) Asia/Dili'), ('Asia/Dubai', '(GMT+0400) Asia/Dubai'), ('Asia/Dushanbe', '(GMT+0500) Asia/Dushanbe'), ('Asia/Famagusta', '(GMT+0300) Asia/Famagusta'), ('Asia/Gaza', '(GMT+0300) Asia/Gaza'), ('Asia/Hebron', '(GMT+0300) Asia/Hebron'), ('Asia/Ho_Chi_Minh', '(GMT+0700) Asia/Ho_Chi_Minh'), ('Asia/Hong_Kong', '(GMT+0800) Asia/Hong_Kong'), ('Asia/Hovd', '(GMT+0700) Asia/Hovd'), ('Asia/Irkutsk', '(GMT+0800) Asia/Irkutsk'), ('Asia/Jakarta', '(GMT+0700) Asia/Jakarta'), ('Asia/Jayapura', '(GMT+0900) Asia/Jayapura'), ('Asia/Jerusalem', '(GMT+0300) Asia/Jerusalem'), ('Asia/Kabul', '(GMT+0430) Asia/Kabul'), ('Asia/Kamchatka', '(GMT+1200) Asia/Kamchatka'), ('Asia/Karachi', '(GMT+0500) Asia/Karachi'), ('Asia/Kathmandu', '(GMT+0545) Asia/Kathmandu'), ('Asia/Khandyga', '(GMT+0900) Asia/Khandyga'), ('Asia/Kolkata', '(GMT+0530) Asia/Kolkata'), ('Asia/Krasnoyarsk', '(GMT+0700) Asia/Krasnoyarsk'), ('Asia/Kuala_Lumpur', '(GMT+0800) Asia/Kuala_Lumpur'), ('Asia/Kuching', '(GMT+0800) 
Asia/Kuching'), ('Asia/Kuwait', '(GMT+0300) Asia/Kuwait'), ('Asia/Macau', '(GMT+0800) Asia/Macau'), ('Asia/Magadan', '(GMT+1100) Asia/Magadan'), ('Asia/Makassar', '(GMT+0800) Asia/Makassar'), ('Asia/Manila', '(GMT+0800) Asia/Manila'), ('Asia/Muscat', '(GMT+0400) Asia/Muscat'), ('Asia/Nicosia', '(GMT+0300) Asia/Nicosia'), ('Asia/Novokuznetsk', '(GMT+0700) Asia/Novokuznetsk'), ('Asia/Novosibirsk', '(GMT+0700) Asia/Novosibirsk'), ('Asia/Omsk', '(GMT+0600) Asia/Omsk'), ('Asia/Oral', '(GMT+0500) Asia/Oral'), ('Asia/Phnom_Penh', '(GMT+0700) Asia/Phnom_Penh'), ('Asia/Pontianak', '(GMT+0700) Asia/Pontianak'), ('Asia/Pyongyang', '(GMT+0830) Asia/Pyongyang'), ('Asia/Qatar', '(GMT+0300) Asia/Qatar'), ('Asia/Qyzylorda', '(GMT+0600) Asia/Qyzylorda'), ('Asia/Riyadh', '(GMT+0300) Asia/Riyadh'), ('Asia/Sakhalin', '(GMT+1100) Asia/Sakhalin'), ('Asia/Samarkand', '(GMT+0500) Asia/Samarkand'), ('Asia/Seoul', '(GMT+0900) Asia/Seoul'), ('Asia/Shanghai', '(GMT+0800) Asia/Shanghai'), ('Asia/Singapore', '(GMT+0800) Asia/Singapore'), ('Asia/Srednekolymsk', '(GMT+1100) Asia/Srednekolymsk'), ('Asia/Taipei', '(GMT+0800) Asia/Taipei'), ('Asia/Tashkent', '(GMT+0500) Asia/Tashkent'), ('Asia/Tbilisi', '(GMT+0400) Asia/Tbilisi'), ('Asia/Tehran', '(GMT+0330) Asia/Tehran'), ('Asia/Thimphu', '(GMT+0600) Asia/Thimphu'), ('Asia/Tokyo', '(GMT+0900) Asia/Tokyo'), ('Asia/Tomsk', '(GMT+0700) Asia/Tomsk'), ('Asia/Ulaanbaatar', '(GMT+0800) Asia/Ulaanbaatar'), ('Asia/Urumqi', '(GMT+0600) Asia/Urumqi'), ('Asia/Ust-Nera', '(GMT+1000) Asia/Ust-Nera'), ('Asia/Vientiane', '(GMT+0700) Asia/Vientiane'), ('Asia/Vladivostok', '(GMT+1000) Asia/Vladivostok'), ('Asia/Yakutsk', '(GMT+0900) Asia/Yakutsk'), ('Asia/Yangon', '(GMT+0630) Asia/Yangon'), ('Asia/Yekaterinburg', '(GMT+0500) Asia/Yekaterinburg'), ('Asia/Yerevan', '(GMT+0400) Asia/Yerevan'), ('Atlantic/Azores', '(GMT+0000) Atlantic/Azores'), ('Atlantic/Bermuda', '(GMT-0300) Atlantic/Bermuda'), ('Atlantic/Canary', '(GMT+0100) Atlantic/Canary'), ('Atlantic/Cape_Verde', '(GMT-0100) Atlantic/Cape_Verde'), ('Atlantic/Faroe', '(GMT+0100) Atlantic/Faroe'), ('Atlantic/Madeira', '(GMT+0100) Atlantic/Madeira'), ('Atlantic/Reykjavik', '(GMT+0000) Atlantic/Reykjavik'), ('Atlantic/South_Georgia', '(GMT-0200) Atlantic/South_Georgia'), ('Atlantic/St_Helena', '(GMT+0000) Atlantic/St_Helena'), ('Atlantic/Stanley', '(GMT-0300) Atlantic/Stanley'), ('Australia/Adelaide', '(GMT+1030) Australia/Adelaide'), ('Australia/Brisbane', '(GMT+1000) Australia/Brisbane'), ('Australia/Broken_Hill', '(GMT+1030) Australia/Broken_Hill'), ('Australia/Currie', '(GMT+1100) Australia/Currie'), ('Australia/Darwin', '(GMT+0930) Australia/Darwin'), ('Australia/Eucla', '(GMT+0845) Australia/Eucla'), ('Australia/Hobart', '(GMT+1100) Australia/Hobart'), ('Australia/Lindeman', '(GMT+1000) Australia/Lindeman'), ('Australia/Lord_Howe', '(GMT+1100) Australia/Lord_Howe'), ('Australia/Melbourne', '(GMT+1100) Australia/Melbourne'), ('Australia/Perth', '(GMT+0800) Australia/Perth'), ('Australia/Sydney', '(GMT+1100) Australia/Sydney'), ('Canada/Atlantic', '(GMT-0300) Canada/Atlantic'), ('Canada/Central', '(GMT-0500) Canada/Central'), ('Canada/Eastern', '(GMT-0400) Canada/Eastern'), ('Canada/Mountain', '(GMT-0600) Canada/Mountain'), ('Canada/Newfoundland', '(GMT-0230) Canada/Newfoundland'), ('Canada/Pacific', '(GMT-0700) Canada/Pacific'), ('Europe/Amsterdam', '(GMT+0200) Europe/Amsterdam'), ('Europe/Andorra', '(GMT+0200) Europe/Andorra'), ('Europe/Astrakhan', '(GMT+0400) Europe/Astrakhan'), ('Europe/Athens', '(GMT+0300) Europe/Athens'), 
('Europe/Belgrade', '(GMT+0200) Europe/Belgrade'), ('Europe/Berlin', '(GMT+0200) Europe/Berlin'), ('Europe/Bratislava', '(GMT+0200) Europe/Bratislava'), ('Europe/Brussels', '(GMT+0200) Europe/Brussels'), ('Europe/Bucharest', '(GMT+0300) Europe/Bucharest'), ('Europe/Budapest', '(GMT+0200) Europe/Budapest'), ('Europe/Busingen', '(GMT+0200) Europe/Busingen'), ('Europe/Chisinau', '(GMT+0300) Europe/Chisinau'), ('Europe/Copenhagen', '(GMT+0200) Europe/Copenhagen'), ('Europe/Dublin', '(GMT+0100) Europe/Dublin'), ('Europe/Gibraltar', '(GMT+0200) Europe/Gibraltar'), ('Europe/Guernsey', '(GMT+0100) Europe/Guernsey'), ('Europe/Helsinki', '(GMT+0300) Europe/Helsinki'), ('Europe/Isle_of_Man', '(GMT+0100) Europe/Isle_of_Man'), ('Europe/Istanbul', '(GMT+0300) Europe/Istanbul'), ('Europe/Jersey', '(GMT+0100) Europe/Jersey'), ('Europe/Kaliningrad', '(GMT+0200) Europe/Kaliningrad'), ('Europe/Kiev', '(GMT+0300) Europe/Kiev'), ('Europe/Kirov', '(GMT+0300) Europe/Kirov'), ('Europe/Lisbon', '(GMT+0100) Europe/Lisbon'), ('Europe/Ljubljana', '(GMT+0200) Europe/Ljubljana'), ('Europe/London', '(GMT+0100) Europe/London'), ('Europe/Luxembourg', '(GMT+0200) Europe/Luxembourg'), ('Europe/Madrid', '(GMT+0200) Europe/Madrid'), ('Europe/Malta', '(GMT+0200) Europe/Malta'), ('Europe/Mariehamn', '(GMT+0300) Europe/Mariehamn'), ('Europe/Minsk', '(GMT+0300) Europe/Minsk'), ('Europe/Monaco', '(GMT+0200) Europe/Monaco'), ('Europe/Moscow', '(GMT+0300) Europe/Moscow'), ('Europe/Oslo', '(GMT+0200) Europe/Oslo'), ('Europe/Paris', '(GMT+0200) Europe/Paris'), ('Europe/Podgorica', '(GMT+0200) Europe/Podgorica'), ('Europe/Prague', '(GMT+0200) Europe/Prague'), ('Europe/Riga', '(GMT+0300) Europe/Riga'), ('Europe/Rome', '(GMT+0200) Europe/Rome'), ('Europe/Samara', '(GMT+0400) Europe/Samara'), ('Europe/San_Marino', '(GMT+0200) Europe/San_Marino'), ('Europe/Sarajevo', '(GMT+0200) Europe/Sarajevo'), ('Europe/Saratov', '(GMT+0400) Europe/Saratov'), ('Europe/Simferopol', '(GMT+0300) Europe/Simferopol'), ('Europe/Skopje', '(GMT+0200) Europe/Skopje'), ('Europe/Sofia', '(GMT+0300) Europe/Sofia'), ('Europe/Stockholm', '(GMT+0200) Europe/Stockholm'), ('Europe/Tallinn', '(GMT+0300) Europe/Tallinn'), ('Europe/Tirane', '(GMT+0200) Europe/Tirane'), ('Europe/Ulyanovsk', '(GMT+0400) Europe/Ulyanovsk'), ('Europe/Uzhgorod', '(GMT+0300) Europe/Uzhgorod'), ('Europe/Vaduz', '(GMT+0200) Europe/Vaduz'), ('Europe/Vatican', '(GMT+0200) Europe/Vatican'), ('Europe/Vienna', '(GMT+0200) Europe/Vienna'), ('Europe/Vilnius', '(GMT+0300) Europe/Vilnius'), ('Europe/Volgograd', '(GMT+0300) Europe/Volgograd'), ('Europe/Warsaw', '(GMT+0200) Europe/Warsaw'), ('Europe/Zagreb', '(GMT+0200) Europe/Zagreb'), ('Europe/Zaporozhye', '(GMT+0300) Europe/Zaporozhye'), ('Europe/Zurich', '(GMT+0200) Europe/Zurich'), ('GMT', '(GMT+0000) GMT'), ('Indian/Antananarivo', '(GMT+0300) Indian/Antananarivo'), ('Indian/Chagos', '(GMT+0600) Indian/Chagos'), ('Indian/Christmas', '(GMT+0700) Indian/Christmas'), ('Indian/Cocos', '(GMT+0630) Indian/Cocos'), ('Indian/Comoro', '(GMT+0300) Indian/Comoro'), ('Indian/Kerguelen', '(GMT+0500) Indian/Kerguelen'), ('Indian/Mahe', '(GMT+0400) Indian/Mahe'), ('Indian/Maldives', '(GMT+0500) Indian/Maldives'), ('Indian/Mauritius', '(GMT+0400) Indian/Mauritius'), ('Indian/Mayotte', '(GMT+0300) Indian/Mayotte'), ('Indian/Reunion', '(GMT+0400) Indian/Reunion'), ('Pacific/Apia', '(GMT+1400) Pacific/Apia'), ('Pacific/Auckland', '(GMT+1300) Pacific/Auckland'), ('Pacific/Bougainville', '(GMT+1100) Pacific/Bougainville'), ('Pacific/Chatham', '(GMT+1345) Pacific/Chatham'), 
('Pacific/Chuuk', '(GMT+1000) Pacific/Chuuk'), ('Pacific/Easter', '(GMT-0500) Pacific/Easter'), ('Pacific/Efate', '(GMT+1100) Pacific/Efate'), ('Pacific/Enderbury', '(GMT+1300) Pacific/Enderbury'), ('Pacific/Fakaofo', '(GMT+1300) Pacific/Fakaofo'), ('Pacific/Fiji', '(GMT+1200) Pacific/Fiji'), ('Pacific/Funafuti', '(GMT+1200) Pacific/Funafuti'), ('Pacific/Galapagos', '(GMT-0600) Pacific/Galapagos'), ('Pacific/Gambier', '(GMT-0900) Pacific/Gambier'), ('Pacific/Guadalcanal', '(GMT+1100) Pacific/Guadalcanal'), ('Pacific/Guam', '(GMT+1000) Pacific/Guam'), ('Pacific/Honolulu', '(GMT-1000) Pacific/Honolulu'), ('Pacific/Kiritimati', '(GMT+1400) Pacific/Kiritimati'), ('Pacific/Kosrae', '(GMT+1100) Pacific/Kosrae'), ('Pacific/Kwajalein', '(GMT+1200) Pacific/Kwajalein'), ('Pacific/Majuro', '(GMT+1200) Pacific/Majuro'), ('Pacific/Marquesas', '(GMT-0930) Pacific/Marquesas'), ('Pacific/Midway', '(GMT-1100) Pacific/Midway'), ('Pacific/Nauru', '(GMT+1200) Pacific/Nauru'), ('Pacific/Niue', '(GMT-1100) Pacific/Niue'), ('Pacific/Norfolk', '(GMT+1100) Pacific/Norfolk'), ('Pacific/Noumea', '(GMT+1100) Pacific/Noumea'), ('Pacific/Pago_Pago', '(GMT-1100) Pacific/Pago_Pago'), ('Pacific/Palau', '(GMT+0900) Pacific/Palau'), ('Pacific/Pitcairn', '(GMT-0800) Pacific/Pitcairn'), ('Pacific/Pohnpei', '(GMT+1100) Pacific/Pohnpei'), ('Pacific/Port_Moresby', '(GMT+1000) Pacific/Port_Moresby'), ('Pacific/Rarotonga', '(GMT-1000) Pacific/Rarotonga'), ('Pacific/Saipan', '(GMT+1000) Pacific/Saipan'), ('Pacific/Tahiti', '(GMT-1000) Pacific/Tahiti'), ('Pacific/Tarawa', '(GMT+1200) Pacific/Tarawa'), ('Pacific/Tongatapu', '(GMT+1300) Pacific/Tongatapu'), ('Pacific/Wake', '(GMT+1200) Pacific/Wake'), ('Pacific/Wallis', '(GMT+1200) Pacific/Wallis'), ('US/Alaska', '(GMT-0800) US/Alaska'), ('US/Arizona', '(GMT-0700) US/Arizona'), ('US/Central', '(GMT-0500) US/Central'), ('US/Eastern', '(GMT-0400) US/Eastern'), ('US/Hawaii', '(GMT-1000) US/Hawaii'), ('US/Mountain', '(GMT-0600) US/Mountain'), ('US/Pacific', '(GMT-0700) US/Pacific'), ('UTC', '(GMT+0000) UTC')], default='America/New_York', max_length=100),
+ model_name="profile",
+ name="timezone",
+ field=vendor.timezones.fields.TimeZoneField(
+ choices=[
+ ("Africa/Abidjan", "(GMT+0000) Africa/Abidjan"),
+ ("Africa/Accra", "(GMT+0000) Africa/Accra"),
+ ("Africa/Addis_Ababa", "(GMT+0300) Africa/Addis_Ababa"),
+ ("Africa/Algiers", "(GMT+0100) Africa/Algiers"),
+ ("Africa/Asmara", "(GMT+0300) Africa/Asmara"),
+ ("Africa/Bamako", "(GMT+0000) Africa/Bamako"),
+ ("Africa/Bangui", "(GMT+0100) Africa/Bangui"),
+ ("Africa/Banjul", "(GMT+0000) Africa/Banjul"),
+ ("Africa/Bissau", "(GMT+0000) Africa/Bissau"),
+ ("Africa/Blantyre", "(GMT+0200) Africa/Blantyre"),
+ ("Africa/Brazzaville", "(GMT+0100) Africa/Brazzaville"),
+ ("Africa/Bujumbura", "(GMT+0200) Africa/Bujumbura"),
+ ("Africa/Cairo", "(GMT+0200) Africa/Cairo"),
+ ("Africa/Casablanca", "(GMT+0100) Africa/Casablanca"),
+ ("Africa/Ceuta", "(GMT+0200) Africa/Ceuta"),
+ ("Africa/Conakry", "(GMT+0000) Africa/Conakry"),
+ ("Africa/Dakar", "(GMT+0000) Africa/Dakar"),
+ ("Africa/Dar_es_Salaam", "(GMT+0300) Africa/Dar_es_Salaam"),
+ ("Africa/Djibouti", "(GMT+0300) Africa/Djibouti"),
+ ("Africa/Douala", "(GMT+0100) Africa/Douala"),
+ ("Africa/El_Aaiun", "(GMT+0100) Africa/El_Aaiun"),
+ ("Africa/Freetown", "(GMT+0000) Africa/Freetown"),
+ ("Africa/Gaborone", "(GMT+0200) Africa/Gaborone"),
+ ("Africa/Harare", "(GMT+0200) Africa/Harare"),
+ ("Africa/Johannesburg", "(GMT+0200) Africa/Johannesburg"),
+ ("Africa/Juba", "(GMT+0300) Africa/Juba"),
+ ("Africa/Kampala", "(GMT+0300) Africa/Kampala"),
+ ("Africa/Khartoum", "(GMT+0200) Africa/Khartoum"),
+ ("Africa/Kigali", "(GMT+0200) Africa/Kigali"),
+ ("Africa/Kinshasa", "(GMT+0100) Africa/Kinshasa"),
+ ("Africa/Lagos", "(GMT+0100) Africa/Lagos"),
+ ("Africa/Libreville", "(GMT+0100) Africa/Libreville"),
+ ("Africa/Lome", "(GMT+0000) Africa/Lome"),
+ ("Africa/Luanda", "(GMT+0100) Africa/Luanda"),
+ ("Africa/Lubumbashi", "(GMT+0200) Africa/Lubumbashi"),
+ ("Africa/Lusaka", "(GMT+0200) Africa/Lusaka"),
+ ("Africa/Malabo", "(GMT+0100) Africa/Malabo"),
+ ("Africa/Maputo", "(GMT+0200) Africa/Maputo"),
+ ("Africa/Maseru", "(GMT+0200) Africa/Maseru"),
+ ("Africa/Mbabane", "(GMT+0200) Africa/Mbabane"),
+ ("Africa/Mogadishu", "(GMT+0300) Africa/Mogadishu"),
+ ("Africa/Monrovia", "(GMT+0000) Africa/Monrovia"),
+ ("Africa/Nairobi", "(GMT+0300) Africa/Nairobi"),
+ ("Africa/Ndjamena", "(GMT+0100) Africa/Ndjamena"),
+ ("Africa/Niamey", "(GMT+0100) Africa/Niamey"),
+ ("Africa/Nouakchott", "(GMT+0000) Africa/Nouakchott"),
+ ("Africa/Ouagadougou", "(GMT+0000) Africa/Ouagadougou"),
+ ("Africa/Porto-Novo", "(GMT+0100) Africa/Porto-Novo"),
+ ("Africa/Sao_Tome", "(GMT+0100) Africa/Sao_Tome"),
+ ("Africa/Tripoli", "(GMT+0200) Africa/Tripoli"),
+ ("Africa/Tunis", "(GMT+0100) Africa/Tunis"),
+ ("Africa/Windhoek", "(GMT+0200) Africa/Windhoek"),
+ ("America/Adak", "(GMT-0900) America/Adak"),
+ ("America/Anchorage", "(GMT-0800) America/Anchorage"),
+ ("America/Anguilla", "(GMT-0400) America/Anguilla"),
+ ("America/Antigua", "(GMT-0400) America/Antigua"),
+ ("America/Araguaina", "(GMT-0300) America/Araguaina"),
+ ("America/Argentina/Buenos_Aires", "(GMT-0300) America/Argentina/Buenos_Aires"),
+ ("America/Argentina/Catamarca", "(GMT-0300) America/Argentina/Catamarca"),
+ ("America/Argentina/Cordoba", "(GMT-0300) America/Argentina/Cordoba"),
+ ("America/Argentina/Jujuy", "(GMT-0300) America/Argentina/Jujuy"),
+ ("America/Argentina/La_Rioja", "(GMT-0300) America/Argentina/La_Rioja"),
+ ("America/Argentina/Mendoza", "(GMT-0300) America/Argentina/Mendoza"),
+ ("America/Argentina/Rio_Gallegos", "(GMT-0300) America/Argentina/Rio_Gallegos"),
+ ("America/Argentina/Salta", "(GMT-0300) America/Argentina/Salta"),
+ ("America/Argentina/San_Juan", "(GMT-0300) America/Argentina/San_Juan"),
+ ("America/Argentina/San_Luis", "(GMT-0300) America/Argentina/San_Luis"),
+ ("America/Argentina/Tucuman", "(GMT-0300) America/Argentina/Tucuman"),
+ ("America/Argentina/Ushuaia", "(GMT-0300) America/Argentina/Ushuaia"),
+ ("America/Aruba", "(GMT-0400) America/Aruba"),
+ ("America/Asuncion", "(GMT-0300) America/Asuncion"),
+ ("America/Atikokan", "(GMT-0500) America/Atikokan"),
+ ("America/Bahia", "(GMT-0300) America/Bahia"),
+ ("America/Bahia_Banderas", "(GMT-0500) America/Bahia_Banderas"),
+ ("America/Barbados", "(GMT-0400) America/Barbados"),
+ ("America/Belem", "(GMT-0300) America/Belem"),
+ ("America/Belize", "(GMT-0600) America/Belize"),
+ ("America/Blanc-Sablon", "(GMT-0400) America/Blanc-Sablon"),
+ ("America/Boa_Vista", "(GMT-0400) America/Boa_Vista"),
+ ("America/Bogota", "(GMT-0500) America/Bogota"),
+ ("America/Boise", "(GMT-0600) America/Boise"),
+ ("America/Cambridge_Bay", "(GMT-0600) America/Cambridge_Bay"),
+ ("America/Campo_Grande", "(GMT-0400) America/Campo_Grande"),
+ ("America/Cancun", "(GMT-0500) America/Cancun"),
+ ("America/Caracas", "(GMT-0400) America/Caracas"),
+ ("America/Cayenne", "(GMT-0300) America/Cayenne"),
+ ("America/Cayman", "(GMT-0500) America/Cayman"),
+ ("America/Chicago", "(GMT-0500) America/Chicago"),
+ ("America/Chihuahua", "(GMT-0600) America/Chihuahua"),
+ ("America/Costa_Rica", "(GMT-0600) America/Costa_Rica"),
+ ("America/Creston", "(GMT-0700) America/Creston"),
+ ("America/Cuiaba", "(GMT-0400) America/Cuiaba"),
+ ("America/Curacao", "(GMT-0400) America/Curacao"),
+ ("America/Danmarkshavn", "(GMT+0000) America/Danmarkshavn"),
+ ("America/Dawson", "(GMT-0700) America/Dawson"),
+ ("America/Dawson_Creek", "(GMT-0700) America/Dawson_Creek"),
+ ("America/Denver", "(GMT-0600) America/Denver"),
+ ("America/Detroit", "(GMT-0400) America/Detroit"),
+ ("America/Dominica", "(GMT-0400) America/Dominica"),
+ ("America/Edmonton", "(GMT-0600) America/Edmonton"),
+ ("America/Eirunepe", "(GMT-0500) America/Eirunepe"),
+ ("America/El_Salvador", "(GMT-0600) America/El_Salvador"),
+ ("America/Fort_Nelson", "(GMT-0700) America/Fort_Nelson"),
+ ("America/Fortaleza", "(GMT-0300) America/Fortaleza"),
+ ("America/Glace_Bay", "(GMT-0300) America/Glace_Bay"),
+ ("America/Godthab", "(GMT-0200) America/Godthab"),
+ ("America/Goose_Bay", "(GMT-0300) America/Goose_Bay"),
+ ("America/Grand_Turk", "(GMT-0400) America/Grand_Turk"),
+ ("America/Grenada", "(GMT-0400) America/Grenada"),
+ ("America/Guadeloupe", "(GMT-0400) America/Guadeloupe"),
+ ("America/Guatemala", "(GMT-0600) America/Guatemala"),
+ ("America/Guayaquil", "(GMT-0500) America/Guayaquil"),
+ ("America/Guyana", "(GMT-0400) America/Guyana"),
+ ("America/Halifax", "(GMT-0300) America/Halifax"),
+ ("America/Havana", "(GMT-0400) America/Havana"),
+ ("America/Hermosillo", "(GMT-0700) America/Hermosillo"),
+ ("America/Indiana/Indianapolis", "(GMT-0400) America/Indiana/Indianapolis"),
+ ("America/Indiana/Knox", "(GMT-0500) America/Indiana/Knox"),
+ ("America/Indiana/Marengo", "(GMT-0400) America/Indiana/Marengo"),
+ ("America/Indiana/Petersburg", "(GMT-0400) America/Indiana/Petersburg"),
+ ("America/Indiana/Tell_City", "(GMT-0500) America/Indiana/Tell_City"),
+ ("America/Indiana/Vevay", "(GMT-0400) America/Indiana/Vevay"),
+ ("America/Indiana/Vincennes", "(GMT-0400) America/Indiana/Vincennes"),
+ ("America/Indiana/Winamac", "(GMT-0400) America/Indiana/Winamac"),
+ ("America/Inuvik", "(GMT-0600) America/Inuvik"),
+ ("America/Iqaluit", "(GMT-0400) America/Iqaluit"),
+ ("America/Jamaica", "(GMT-0500) America/Jamaica"),
+ ("America/Juneau", "(GMT-0800) America/Juneau"),
+ ("America/Kentucky/Louisville", "(GMT-0400) America/Kentucky/Louisville"),
+ ("America/Kentucky/Monticello", "(GMT-0400) America/Kentucky/Monticello"),
+ ("America/Kralendijk", "(GMT-0400) America/Kralendijk"),
+ ("America/La_Paz", "(GMT-0400) America/La_Paz"),
+ ("America/Lima", "(GMT-0500) America/Lima"),
+ ("America/Los_Angeles", "(GMT-0700) America/Los_Angeles"),
+ ("America/Lower_Princes", "(GMT-0400) America/Lower_Princes"),
+ ("America/Maceio", "(GMT-0300) America/Maceio"),
+ ("America/Managua", "(GMT-0600) America/Managua"),
+ ("America/Manaus", "(GMT-0400) America/Manaus"),
+ ("America/Marigot", "(GMT-0400) America/Marigot"),
+ ("America/Martinique", "(GMT-0400) America/Martinique"),
+ ("America/Matamoros", "(GMT-0500) America/Matamoros"),
+ ("America/Mazatlan", "(GMT-0600) America/Mazatlan"),
+ ("America/Menominee", "(GMT-0500) America/Menominee"),
+ ("America/Merida", "(GMT-0500) America/Merida"),
+ ("America/Metlakatla", "(GMT-0800) America/Metlakatla"),
+ ("America/Mexico_City", "(GMT-0500) America/Mexico_City"),
+ ("America/Miquelon", "(GMT-0200) America/Miquelon"),
+ ("America/Moncton", "(GMT-0300) America/Moncton"),
+ ("America/Monterrey", "(GMT-0500) America/Monterrey"),
+ ("America/Montevideo", "(GMT-0300) America/Montevideo"),
+ ("America/Montserrat", "(GMT-0400) America/Montserrat"),
+ ("America/Nassau", "(GMT-0400) America/Nassau"),
+ ("America/New_York", "(GMT-0400) America/New_York"),
+ ("America/Nipigon", "(GMT-0400) America/Nipigon"),
+ ("America/Nome", "(GMT-0800) America/Nome"),
+ ("America/Noronha", "(GMT-0200) America/Noronha"),
+ ("America/North_Dakota/Beulah", "(GMT-0500) America/North_Dakota/Beulah"),
+ ("America/North_Dakota/Center", "(GMT-0500) America/North_Dakota/Center"),
+ ("America/North_Dakota/New_Salem", "(GMT-0500) America/North_Dakota/New_Salem"),
+ ("America/Ojinaga", "(GMT-0600) America/Ojinaga"),
+ ("America/Panama", "(GMT-0500) America/Panama"),
+ ("America/Pangnirtung", "(GMT-0400) America/Pangnirtung"),
+ ("America/Paramaribo", "(GMT-0300) America/Paramaribo"),
+ ("America/Phoenix", "(GMT-0700) America/Phoenix"),
+ ("America/Port-au-Prince", "(GMT-0400) America/Port-au-Prince"),
+ ("America/Port_of_Spain", "(GMT-0400) America/Port_of_Spain"),
+ ("America/Porto_Velho", "(GMT-0400) America/Porto_Velho"),
+ ("America/Puerto_Rico", "(GMT-0400) America/Puerto_Rico"),
+ ("America/Punta_Arenas", "(GMT-0300) America/Punta_Arenas"),
+ ("America/Rainy_River", "(GMT-0500) America/Rainy_River"),
+ ("America/Rankin_Inlet", "(GMT-0500) America/Rankin_Inlet"),
+ ("America/Recife", "(GMT-0300) America/Recife"),
+ ("America/Regina", "(GMT-0600) America/Regina"),
+ ("America/Resolute", "(GMT-0500) America/Resolute"),
+ ("America/Rio_Branco", "(GMT-0500) America/Rio_Branco"),
+ ("America/Santarem", "(GMT-0300) America/Santarem"),
+ ("America/Santiago", "(GMT-0300) America/Santiago"),
+ ("America/Santo_Domingo", "(GMT-0400) America/Santo_Domingo"),
+ ("America/Sao_Paulo", "(GMT-0300) America/Sao_Paulo"),
+ ("America/Scoresbysund", "(GMT+0000) America/Scoresbysund"),
+ ("America/Sitka", "(GMT-0800) America/Sitka"),
+ ("America/St_Barthelemy", "(GMT-0400) America/St_Barthelemy"),
+ ("America/St_Johns", "(GMT-0230) America/St_Johns"),
+ ("America/St_Kitts", "(GMT-0400) America/St_Kitts"),
+ ("America/St_Lucia", "(GMT-0400) America/St_Lucia"),
+ ("America/St_Thomas", "(GMT-0400) America/St_Thomas"),
+ ("America/St_Vincent", "(GMT-0400) America/St_Vincent"),
+ ("America/Swift_Current", "(GMT-0600) America/Swift_Current"),
+ ("America/Tegucigalpa", "(GMT-0600) America/Tegucigalpa"),
+ ("America/Thule", "(GMT-0300) America/Thule"),
+ ("America/Thunder_Bay", "(GMT-0400) America/Thunder_Bay"),
+ ("America/Tijuana", "(GMT-0700) America/Tijuana"),
+ ("America/Toronto", "(GMT-0400) America/Toronto"),
+ ("America/Tortola", "(GMT-0400) America/Tortola"),
+ ("America/Vancouver", "(GMT-0700) America/Vancouver"),
+ ("America/Whitehorse", "(GMT-0700) America/Whitehorse"),
+ ("America/Winnipeg", "(GMT-0500) America/Winnipeg"),
+ ("America/Yakutat", "(GMT-0800) America/Yakutat"),
+ ("America/Yellowknife", "(GMT-0600) America/Yellowknife"),
+ ("Antarctica/Casey", "(GMT+1100) Antarctica/Casey"),
+ ("Antarctica/Davis", "(GMT+0700) Antarctica/Davis"),
+ ("Antarctica/DumontDUrville", "(GMT+1000) Antarctica/DumontDUrville"),
+ ("Antarctica/Macquarie", "(GMT+1100) Antarctica/Macquarie"),
+ ("Antarctica/Mawson", "(GMT+0500) Antarctica/Mawson"),
+ ("Antarctica/McMurdo", "(GMT+1300) Antarctica/McMurdo"),
+ ("Antarctica/Palmer", "(GMT-0300) Antarctica/Palmer"),
+ ("Antarctica/Rothera", "(GMT-0300) Antarctica/Rothera"),
+ ("Antarctica/Syowa", "(GMT+0300) Antarctica/Syowa"),
+ ("Antarctica/Troll", "(GMT+0200) Antarctica/Troll"),
+ ("Antarctica/Vostok", "(GMT+0600) Antarctica/Vostok"),
+ ("Arctic/Longyearbyen", "(GMT+0200) Arctic/Longyearbyen"),
+ ("Asia/Aden", "(GMT+0300) Asia/Aden"),
+ ("Asia/Almaty", "(GMT+0600) Asia/Almaty"),
+ ("Asia/Amman", "(GMT+0300) Asia/Amman"),
+ ("Asia/Anadyr", "(GMT+1200) Asia/Anadyr"),
+ ("Asia/Aqtau", "(GMT+0500) Asia/Aqtau"),
+ ("Asia/Aqtobe", "(GMT+0500) Asia/Aqtobe"),
+ ("Asia/Ashgabat", "(GMT+0500) Asia/Ashgabat"),
+ ("Asia/Atyrau", "(GMT+0500) Asia/Atyrau"),
+ ("Asia/Baghdad", "(GMT+0300) Asia/Baghdad"),
+ ("Asia/Bahrain", "(GMT+0300) Asia/Bahrain"),
+ ("Asia/Baku", "(GMT+0400) Asia/Baku"),
+ ("Asia/Bangkok", "(GMT+0700) Asia/Bangkok"),
+ ("Asia/Barnaul", "(GMT+0700) Asia/Barnaul"),
+ ("Asia/Beirut", "(GMT+0300) Asia/Beirut"),
+ ("Asia/Bishkek", "(GMT+0600) Asia/Bishkek"),
+ ("Asia/Brunei", "(GMT+0800) Asia/Brunei"),
+ ("Asia/Chita", "(GMT+0900) Asia/Chita"),
+ ("Asia/Choibalsan", "(GMT+0800) Asia/Choibalsan"),
+ ("Asia/Colombo", "(GMT+0530) Asia/Colombo"),
+ ("Asia/Damascus", "(GMT+0300) Asia/Damascus"),
+ ("Asia/Dhaka", "(GMT+0600) Asia/Dhaka"),
+ ("Asia/Dili", "(GMT+0900) Asia/Dili"),
+ ("Asia/Dubai", "(GMT+0400) Asia/Dubai"),
+ ("Asia/Dushanbe", "(GMT+0500) Asia/Dushanbe"),
+ ("Asia/Famagusta", "(GMT+0300) Asia/Famagusta"),
+ ("Asia/Gaza", "(GMT+0300) Asia/Gaza"),
+ ("Asia/Hebron", "(GMT+0300) Asia/Hebron"),
+ ("Asia/Ho_Chi_Minh", "(GMT+0700) Asia/Ho_Chi_Minh"),
+ ("Asia/Hong_Kong", "(GMT+0800) Asia/Hong_Kong"),
+ ("Asia/Hovd", "(GMT+0700) Asia/Hovd"),
+ ("Asia/Irkutsk", "(GMT+0800) Asia/Irkutsk"),
+ ("Asia/Jakarta", "(GMT+0700) Asia/Jakarta"),
+ ("Asia/Jayapura", "(GMT+0900) Asia/Jayapura"),
+ ("Asia/Jerusalem", "(GMT+0300) Asia/Jerusalem"),
+ ("Asia/Kabul", "(GMT+0430) Asia/Kabul"),
+ ("Asia/Kamchatka", "(GMT+1200) Asia/Kamchatka"),
+ ("Asia/Karachi", "(GMT+0500) Asia/Karachi"),
+ ("Asia/Kathmandu", "(GMT+0545) Asia/Kathmandu"),
+ ("Asia/Khandyga", "(GMT+0900) Asia/Khandyga"),
+ ("Asia/Kolkata", "(GMT+0530) Asia/Kolkata"),
+ ("Asia/Krasnoyarsk", "(GMT+0700) Asia/Krasnoyarsk"),
+ ("Asia/Kuala_Lumpur", "(GMT+0800) Asia/Kuala_Lumpur"),
+ ("Asia/Kuching", "(GMT+0800) Asia/Kuching"),
+ ("Asia/Kuwait", "(GMT+0300) Asia/Kuwait"),
+ ("Asia/Macau", "(GMT+0800) Asia/Macau"),
+ ("Asia/Magadan", "(GMT+1100) Asia/Magadan"),
+ ("Asia/Makassar", "(GMT+0800) Asia/Makassar"),
+ ("Asia/Manila", "(GMT+0800) Asia/Manila"),
+ ("Asia/Muscat", "(GMT+0400) Asia/Muscat"),
+ ("Asia/Nicosia", "(GMT+0300) Asia/Nicosia"),
+ ("Asia/Novokuznetsk", "(GMT+0700) Asia/Novokuznetsk"),
+ ("Asia/Novosibirsk", "(GMT+0700) Asia/Novosibirsk"),
+ ("Asia/Omsk", "(GMT+0600) Asia/Omsk"),
+ ("Asia/Oral", "(GMT+0500) Asia/Oral"),
+ ("Asia/Phnom_Penh", "(GMT+0700) Asia/Phnom_Penh"),
+ ("Asia/Pontianak", "(GMT+0700) Asia/Pontianak"),
+ ("Asia/Pyongyang", "(GMT+0830) Asia/Pyongyang"),
+ ("Asia/Qatar", "(GMT+0300) Asia/Qatar"),
+ ("Asia/Qyzylorda", "(GMT+0600) Asia/Qyzylorda"),
+ ("Asia/Riyadh", "(GMT+0300) Asia/Riyadh"),
+ ("Asia/Sakhalin", "(GMT+1100) Asia/Sakhalin"),
+ ("Asia/Samarkand", "(GMT+0500) Asia/Samarkand"),
+ ("Asia/Seoul", "(GMT+0900) Asia/Seoul"),
+ ("Asia/Shanghai", "(GMT+0800) Asia/Shanghai"),
+ ("Asia/Singapore", "(GMT+0800) Asia/Singapore"),
+ ("Asia/Srednekolymsk", "(GMT+1100) Asia/Srednekolymsk"),
+ ("Asia/Taipei", "(GMT+0800) Asia/Taipei"),
+ ("Asia/Tashkent", "(GMT+0500) Asia/Tashkent"),
+ ("Asia/Tbilisi", "(GMT+0400) Asia/Tbilisi"),
+ ("Asia/Tehran", "(GMT+0330) Asia/Tehran"),
+ ("Asia/Thimphu", "(GMT+0600) Asia/Thimphu"),
+ ("Asia/Tokyo", "(GMT+0900) Asia/Tokyo"),
+ ("Asia/Tomsk", "(GMT+0700) Asia/Tomsk"),
+ ("Asia/Ulaanbaatar", "(GMT+0800) Asia/Ulaanbaatar"),
+ ("Asia/Urumqi", "(GMT+0600) Asia/Urumqi"),
+ ("Asia/Ust-Nera", "(GMT+1000) Asia/Ust-Nera"),
+ ("Asia/Vientiane", "(GMT+0700) Asia/Vientiane"),
+ ("Asia/Vladivostok", "(GMT+1000) Asia/Vladivostok"),
+ ("Asia/Yakutsk", "(GMT+0900) Asia/Yakutsk"),
+ ("Asia/Yangon", "(GMT+0630) Asia/Yangon"),
+ ("Asia/Yekaterinburg", "(GMT+0500) Asia/Yekaterinburg"),
+ ("Asia/Yerevan", "(GMT+0400) Asia/Yerevan"),
+ ("Atlantic/Azores", "(GMT+0000) Atlantic/Azores"),
+ ("Atlantic/Bermuda", "(GMT-0300) Atlantic/Bermuda"),
+ ("Atlantic/Canary", "(GMT+0100) Atlantic/Canary"),
+ ("Atlantic/Cape_Verde", "(GMT-0100) Atlantic/Cape_Verde"),
+ ("Atlantic/Faroe", "(GMT+0100) Atlantic/Faroe"),
+ ("Atlantic/Madeira", "(GMT+0100) Atlantic/Madeira"),
+ ("Atlantic/Reykjavik", "(GMT+0000) Atlantic/Reykjavik"),
+ ("Atlantic/South_Georgia", "(GMT-0200) Atlantic/South_Georgia"),
+ ("Atlantic/St_Helena", "(GMT+0000) Atlantic/St_Helena"),
+ ("Atlantic/Stanley", "(GMT-0300) Atlantic/Stanley"),
+ ("Australia/Adelaide", "(GMT+1030) Australia/Adelaide"),
+ ("Australia/Brisbane", "(GMT+1000) Australia/Brisbane"),
+ ("Australia/Broken_Hill", "(GMT+1030) Australia/Broken_Hill"),
+ ("Australia/Currie", "(GMT+1100) Australia/Currie"),
+ ("Australia/Darwin", "(GMT+0930) Australia/Darwin"),
+ ("Australia/Eucla", "(GMT+0845) Australia/Eucla"),
+ ("Australia/Hobart", "(GMT+1100) Australia/Hobart"),
+ ("Australia/Lindeman", "(GMT+1000) Australia/Lindeman"),
+ ("Australia/Lord_Howe", "(GMT+1100) Australia/Lord_Howe"),
+ ("Australia/Melbourne", "(GMT+1100) Australia/Melbourne"),
+ ("Australia/Perth", "(GMT+0800) Australia/Perth"),
+ ("Australia/Sydney", "(GMT+1100) Australia/Sydney"),
+ ("Canada/Atlantic", "(GMT-0300) Canada/Atlantic"),
+ ("Canada/Central", "(GMT-0500) Canada/Central"),
+ ("Canada/Eastern", "(GMT-0400) Canada/Eastern"),
+ ("Canada/Mountain", "(GMT-0600) Canada/Mountain"),
+ ("Canada/Newfoundland", "(GMT-0230) Canada/Newfoundland"),
+ ("Canada/Pacific", "(GMT-0700) Canada/Pacific"),
+ ("Europe/Amsterdam", "(GMT+0200) Europe/Amsterdam"),
+ ("Europe/Andorra", "(GMT+0200) Europe/Andorra"),
+ ("Europe/Astrakhan", "(GMT+0400) Europe/Astrakhan"),
+ ("Europe/Athens", "(GMT+0300) Europe/Athens"),
+ ("Europe/Belgrade", "(GMT+0200) Europe/Belgrade"),
+ ("Europe/Berlin", "(GMT+0200) Europe/Berlin"),
+ ("Europe/Bratislava", "(GMT+0200) Europe/Bratislava"),
+ ("Europe/Brussels", "(GMT+0200) Europe/Brussels"),
+ ("Europe/Bucharest", "(GMT+0300) Europe/Bucharest"),
+ ("Europe/Budapest", "(GMT+0200) Europe/Budapest"),
+ ("Europe/Busingen", "(GMT+0200) Europe/Busingen"),
+ ("Europe/Chisinau", "(GMT+0300) Europe/Chisinau"),
+ ("Europe/Copenhagen", "(GMT+0200) Europe/Copenhagen"),
+ ("Europe/Dublin", "(GMT+0100) Europe/Dublin"),
+ ("Europe/Gibraltar", "(GMT+0200) Europe/Gibraltar"),
+ ("Europe/Guernsey", "(GMT+0100) Europe/Guernsey"),
+ ("Europe/Helsinki", "(GMT+0300) Europe/Helsinki"),
+ ("Europe/Isle_of_Man", "(GMT+0100) Europe/Isle_of_Man"),
+ ("Europe/Istanbul", "(GMT+0300) Europe/Istanbul"),
+ ("Europe/Jersey", "(GMT+0100) Europe/Jersey"),
+ ("Europe/Kaliningrad", "(GMT+0200) Europe/Kaliningrad"),
+ ("Europe/Kiev", "(GMT+0300) Europe/Kiev"),
+ ("Europe/Kirov", "(GMT+0300) Europe/Kirov"),
+ ("Europe/Lisbon", "(GMT+0100) Europe/Lisbon"),
+ ("Europe/Ljubljana", "(GMT+0200) Europe/Ljubljana"),
+ ("Europe/London", "(GMT+0100) Europe/London"),
+ ("Europe/Luxembourg", "(GMT+0200) Europe/Luxembourg"),
+ ("Europe/Madrid", "(GMT+0200) Europe/Madrid"),
+ ("Europe/Malta", "(GMT+0200) Europe/Malta"),
+ ("Europe/Mariehamn", "(GMT+0300) Europe/Mariehamn"),
+ ("Europe/Minsk", "(GMT+0300) Europe/Minsk"),
+ ("Europe/Monaco", "(GMT+0200) Europe/Monaco"),
+ ("Europe/Moscow", "(GMT+0300) Europe/Moscow"),
+ ("Europe/Oslo", "(GMT+0200) Europe/Oslo"),
+ ("Europe/Paris", "(GMT+0200) Europe/Paris"),
+ ("Europe/Podgorica", "(GMT+0200) Europe/Podgorica"),
+ ("Europe/Prague", "(GMT+0200) Europe/Prague"),
+ ("Europe/Riga", "(GMT+0300) Europe/Riga"),
+ ("Europe/Rome", "(GMT+0200) Europe/Rome"),
+ ("Europe/Samara", "(GMT+0400) Europe/Samara"),
+ ("Europe/San_Marino", "(GMT+0200) Europe/San_Marino"),
+ ("Europe/Sarajevo", "(GMT+0200) Europe/Sarajevo"),
+ ("Europe/Saratov", "(GMT+0400) Europe/Saratov"),
+ ("Europe/Simferopol", "(GMT+0300) Europe/Simferopol"),
+ ("Europe/Skopje", "(GMT+0200) Europe/Skopje"),
+ ("Europe/Sofia", "(GMT+0300) Europe/Sofia"),
+ ("Europe/Stockholm", "(GMT+0200) Europe/Stockholm"),
+ ("Europe/Tallinn", "(GMT+0300) Europe/Tallinn"),
+ ("Europe/Tirane", "(GMT+0200) Europe/Tirane"),
+ ("Europe/Ulyanovsk", "(GMT+0400) Europe/Ulyanovsk"),
+ ("Europe/Uzhgorod", "(GMT+0300) Europe/Uzhgorod"),
+ ("Europe/Vaduz", "(GMT+0200) Europe/Vaduz"),
+ ("Europe/Vatican", "(GMT+0200) Europe/Vatican"),
+ ("Europe/Vienna", "(GMT+0200) Europe/Vienna"),
+ ("Europe/Vilnius", "(GMT+0300) Europe/Vilnius"),
+ ("Europe/Volgograd", "(GMT+0300) Europe/Volgograd"),
+ ("Europe/Warsaw", "(GMT+0200) Europe/Warsaw"),
+ ("Europe/Zagreb", "(GMT+0200) Europe/Zagreb"),
+ ("Europe/Zaporozhye", "(GMT+0300) Europe/Zaporozhye"),
+ ("Europe/Zurich", "(GMT+0200) Europe/Zurich"),
+ ("GMT", "(GMT+0000) GMT"),
+ ("Indian/Antananarivo", "(GMT+0300) Indian/Antananarivo"),
+ ("Indian/Chagos", "(GMT+0600) Indian/Chagos"),
+ ("Indian/Christmas", "(GMT+0700) Indian/Christmas"),
+ ("Indian/Cocos", "(GMT+0630) Indian/Cocos"),
+ ("Indian/Comoro", "(GMT+0300) Indian/Comoro"),
+ ("Indian/Kerguelen", "(GMT+0500) Indian/Kerguelen"),
+ ("Indian/Mahe", "(GMT+0400) Indian/Mahe"),
+ ("Indian/Maldives", "(GMT+0500) Indian/Maldives"),
+ ("Indian/Mauritius", "(GMT+0400) Indian/Mauritius"),
+ ("Indian/Mayotte", "(GMT+0300) Indian/Mayotte"),
+ ("Indian/Reunion", "(GMT+0400) Indian/Reunion"),
+ ("Pacific/Apia", "(GMT+1400) Pacific/Apia"),
+ ("Pacific/Auckland", "(GMT+1300) Pacific/Auckland"),
+ ("Pacific/Bougainville", "(GMT+1100) Pacific/Bougainville"),
+ ("Pacific/Chatham", "(GMT+1345) Pacific/Chatham"),
+ ("Pacific/Chuuk", "(GMT+1000) Pacific/Chuuk"),
+ ("Pacific/Easter", "(GMT-0500) Pacific/Easter"),
+ ("Pacific/Efate", "(GMT+1100) Pacific/Efate"),
+ ("Pacific/Enderbury", "(GMT+1300) Pacific/Enderbury"),
+ ("Pacific/Fakaofo", "(GMT+1300) Pacific/Fakaofo"),
+ ("Pacific/Fiji", "(GMT+1200) Pacific/Fiji"),
+ ("Pacific/Funafuti", "(GMT+1200) Pacific/Funafuti"),
+ ("Pacific/Galapagos", "(GMT-0600) Pacific/Galapagos"),
+ ("Pacific/Gambier", "(GMT-0900) Pacific/Gambier"),
+ ("Pacific/Guadalcanal", "(GMT+1100) Pacific/Guadalcanal"),
+ ("Pacific/Guam", "(GMT+1000) Pacific/Guam"),
+ ("Pacific/Honolulu", "(GMT-1000) Pacific/Honolulu"),
+ ("Pacific/Kiritimati", "(GMT+1400) Pacific/Kiritimati"),
+ ("Pacific/Kosrae", "(GMT+1100) Pacific/Kosrae"),
+ ("Pacific/Kwajalein", "(GMT+1200) Pacific/Kwajalein"),
+ ("Pacific/Majuro", "(GMT+1200) Pacific/Majuro"),
+ ("Pacific/Marquesas", "(GMT-0930) Pacific/Marquesas"),
+ ("Pacific/Midway", "(GMT-1100) Pacific/Midway"),
+ ("Pacific/Nauru", "(GMT+1200) Pacific/Nauru"),
+ ("Pacific/Niue", "(GMT-1100) Pacific/Niue"),
+ ("Pacific/Norfolk", "(GMT+1100) Pacific/Norfolk"),
+ ("Pacific/Noumea", "(GMT+1100) Pacific/Noumea"),
+ ("Pacific/Pago_Pago", "(GMT-1100) Pacific/Pago_Pago"),
+ ("Pacific/Palau", "(GMT+0900) Pacific/Palau"),
+ ("Pacific/Pitcairn", "(GMT-0800) Pacific/Pitcairn"),
+ ("Pacific/Pohnpei", "(GMT+1100) Pacific/Pohnpei"),
+ ("Pacific/Port_Moresby", "(GMT+1000) Pacific/Port_Moresby"),
+ ("Pacific/Rarotonga", "(GMT-1000) Pacific/Rarotonga"),
+ ("Pacific/Saipan", "(GMT+1000) Pacific/Saipan"),
+ ("Pacific/Tahiti", "(GMT-1000) Pacific/Tahiti"),
+ ("Pacific/Tarawa", "(GMT+1200) Pacific/Tarawa"),
+ ("Pacific/Tongatapu", "(GMT+1300) Pacific/Tongatapu"),
+ ("Pacific/Wake", "(GMT+1200) Pacific/Wake"),
+ ("Pacific/Wallis", "(GMT+1200) Pacific/Wallis"),
+ ("US/Alaska", "(GMT-0800) US/Alaska"),
+ ("US/Arizona", "(GMT-0700) US/Arizona"),
+ ("US/Central", "(GMT-0500) US/Central"),
+ ("US/Eastern", "(GMT-0400) US/Eastern"),
+ ("US/Hawaii", "(GMT-1000) US/Hawaii"),
+ ("US/Mountain", "(GMT-0600) US/Mountain"),
+ ("US/Pacific", "(GMT-0700) US/Pacific"),
+ ("UTC", "(GMT+0000) UTC"),
+ ],
+ default="America/New_York",
+ max_length=100,
+ ),
),
]
diff --git a/apps/profile/migrations/0004_auto_20220110_2106.py b/apps/profile/migrations/0004_auto_20220110_2106.py
index 5aaed426cb..676d762a55 100644
--- a/apps/profile/migrations/0004_auto_20220110_2106.py
+++ b/apps/profile/migrations/0004_auto_20220110_2106.py
@@ -5,40 +5,484 @@ class Migration(migrations.Migration):
-
dependencies = [
- ('profile', '0003_auto_20201005_0932'),
+ ("profile", "0003_auto_20201005_0932"),
]
operations = [
migrations.AddField(
- model_name='profile',
- name='is_pro',
+ model_name="profile",
+ name="is_pro",
field=models.BooleanField(blank=True, default=False, null=True),
),
migrations.AlterField(
- model_name='profile',
- name='has_found_friends',
+ model_name="profile",
+ name="has_found_friends",
field=models.BooleanField(blank=True, default=False, null=True),
),
migrations.AlterField(
- model_name='profile',
- name='has_setup_feeds',
+ model_name="profile",
+ name="has_setup_feeds",
field=models.BooleanField(blank=True, default=False, null=True),
),
migrations.AlterField(
- model_name='profile',
- name='has_trained_intelligence',
+ model_name="profile",
+ name="has_trained_intelligence",
field=models.BooleanField(blank=True, default=False, null=True),
),
migrations.AlterField(
- model_name='profile',
- name='hide_getting_started',
+ model_name="profile",
+ name="hide_getting_started",
field=models.BooleanField(blank=True, default=False, null=True),
),
migrations.AlterField(
- model_name='profile',
- name='timezone',
- field=vendor.timezones.fields.TimeZoneField(choices=[('Africa/Abidjan', '(GMT+0000) Africa/Abidjan'), ('Africa/Accra', '(GMT+0000) Africa/Accra'), ('Africa/Addis_Ababa', '(GMT+0300) Africa/Addis_Ababa'), ('Africa/Algiers', '(GMT+0100) Africa/Algiers'), ('Africa/Asmara', '(GMT+0300) Africa/Asmara'), ('Africa/Bamako', '(GMT+0000) Africa/Bamako'), ('Africa/Bangui', '(GMT+0100) Africa/Bangui'), ('Africa/Banjul', '(GMT+0000) Africa/Banjul'), ('Africa/Bissau', '(GMT+0000) Africa/Bissau'), ('Africa/Blantyre', '(GMT+0200) Africa/Blantyre'), ('Africa/Brazzaville', '(GMT+0100) Africa/Brazzaville'), ('Africa/Bujumbura', '(GMT+0200) Africa/Bujumbura'), ('Africa/Cairo', '(GMT+0200) Africa/Cairo'), ('Africa/Casablanca', '(GMT+0100) Africa/Casablanca'), ('Africa/Ceuta', '(GMT+0100) Africa/Ceuta'), ('Africa/Conakry', '(GMT+0000) Africa/Conakry'), ('Africa/Dakar', '(GMT+0000) Africa/Dakar'), ('Africa/Dar_es_Salaam', '(GMT+0300) Africa/Dar_es_Salaam'), ('Africa/Djibouti', '(GMT+0300) Africa/Djibouti'), ('Africa/Douala', '(GMT+0100) Africa/Douala'),
('Africa/El_Aaiun', '(GMT+0100) Africa/El_Aaiun'), ('Africa/Freetown', '(GMT+0000) Africa/Freetown'), ('Africa/Gaborone', '(GMT+0200) Africa/Gaborone'), ('Africa/Harare', '(GMT+0200) Africa/Harare'), ('Africa/Johannesburg', '(GMT+0200) Africa/Johannesburg'), ('Africa/Juba', '(GMT+0300) Africa/Juba'), ('Africa/Kampala', '(GMT+0300) Africa/Kampala'), ('Africa/Khartoum', '(GMT+0200) Africa/Khartoum'), ('Africa/Kigali', '(GMT+0200) Africa/Kigali'), ('Africa/Kinshasa', '(GMT+0100) Africa/Kinshasa'), ('Africa/Lagos', '(GMT+0100) Africa/Lagos'), ('Africa/Libreville', '(GMT+0100) Africa/Libreville'), ('Africa/Lome', '(GMT+0000) Africa/Lome'), ('Africa/Luanda', '(GMT+0100) Africa/Luanda'), ('Africa/Lubumbashi', '(GMT+0200) Africa/Lubumbashi'), ('Africa/Lusaka', '(GMT+0200) Africa/Lusaka'), ('Africa/Malabo', '(GMT+0100) Africa/Malabo'), ('Africa/Maputo', '(GMT+0200) Africa/Maputo'), ('Africa/Maseru', '(GMT+0200) Africa/Maseru'), ('Africa/Mbabane', '(GMT+0200) Africa/Mbabane'), ('Africa/Mogadishu', '(GMT+0300) Africa/Mogadishu'), ('Africa/Monrovia', '(GMT+0000) Africa/Monrovia'), ('Africa/Nairobi', '(GMT+0300) Africa/Nairobi'), ('Africa/Ndjamena', '(GMT+0100) Africa/Ndjamena'), ('Africa/Niamey', '(GMT+0100) Africa/Niamey'), ('Africa/Nouakchott', '(GMT+0000) Africa/Nouakchott'), ('Africa/Ouagadougou', '(GMT+0000) Africa/Ouagadougou'), ('Africa/Porto-Novo', '(GMT+0100) Africa/Porto-Novo'), ('Africa/Sao_Tome', '(GMT+0000) Africa/Sao_Tome'), ('Africa/Tripoli', '(GMT+0200) Africa/Tripoli'), ('Africa/Tunis', '(GMT+0100) Africa/Tunis'), ('Africa/Windhoek', '(GMT+0200) Africa/Windhoek'), ('America/Adak', '(GMT-1000) America/Adak'), ('America/Anchorage', '(GMT-0900) America/Anchorage'), ('America/Anguilla', '(GMT-0400) America/Anguilla'), ('America/Antigua', '(GMT-0400) America/Antigua'), ('America/Araguaina', '(GMT-0300) America/Araguaina'), ('America/Argentina/Buenos_Aires', '(GMT-0300) America/Argentina/Buenos_Aires'), ('America/Argentina/Catamarca', '(GMT-0300) America/Argentina/Catamarca'), ('America/Argentina/Cordoba', '(GMT-0300) America/Argentina/Cordoba'), ('America/Argentina/Jujuy', '(GMT-0300) America/Argentina/Jujuy'), ('America/Argentina/La_Rioja', '(GMT-0300) America/Argentina/La_Rioja'), ('America/Argentina/Mendoza', '(GMT-0300) America/Argentina/Mendoza'), ('America/Argentina/Rio_Gallegos', '(GMT-0300) America/Argentina/Rio_Gallegos'), ('America/Argentina/Salta', '(GMT-0300) America/Argentina/Salta'), ('America/Argentina/San_Juan', '(GMT-0300) America/Argentina/San_Juan'), ('America/Argentina/San_Luis', '(GMT-0300) America/Argentina/San_Luis'), ('America/Argentina/Tucuman', '(GMT-0300) America/Argentina/Tucuman'), ('America/Argentina/Ushuaia', '(GMT-0300) America/Argentina/Ushuaia'), ('America/Aruba', '(GMT-0400) America/Aruba'), ('America/Asuncion', '(GMT-0300) America/Asuncion'), ('America/Atikokan', '(GMT-0500) America/Atikokan'), ('America/Bahia', '(GMT-0300) America/Bahia'), ('America/Bahia_Banderas', '(GMT-0600) America/Bahia_Banderas'), ('America/Barbados', '(GMT-0400) America/Barbados'), ('America/Belem', '(GMT-0300) America/Belem'), ('America/Belize', '(GMT-0600) America/Belize'), ('America/Blanc-Sablon', '(GMT-0400) America/Blanc-Sablon'), ('America/Boa_Vista', '(GMT-0400) America/Boa_Vista'), ('America/Bogota', '(GMT-0500) America/Bogota'), ('America/Boise', '(GMT-0700) America/Boise'), ('America/Cambridge_Bay', '(GMT-0700) America/Cambridge_Bay'), ('America/Campo_Grande', '(GMT-0400) America/Campo_Grande'), ('America/Cancun', '(GMT-0500) America/Cancun'), ('America/Caracas', 
'(GMT-0400) America/Caracas'), ('America/Cayenne', '(GMT-0300) America/Cayenne'), ('America/Cayman', '(GMT-0500) America/Cayman'), ('America/Chicago', '(GMT-0600) America/Chicago'), ('America/Chihuahua', '(GMT-0700) America/Chihuahua'), ('America/Costa_Rica', '(GMT-0600) America/Costa_Rica'), ('America/Creston', '(GMT-0700) America/Creston'), ('America/Cuiaba', '(GMT-0400) America/Cuiaba'), ('America/Curacao', '(GMT-0400) America/Curacao'), ('America/Danmarkshavn', '(GMT+0000) America/Danmarkshavn'), ('America/Dawson', '(GMT-0700) America/Dawson'), ('America/Dawson_Creek', '(GMT-0700) America/Dawson_Creek'), ('America/Denver', '(GMT-0700) America/Denver'), ('America/Detroit', '(GMT-0500) America/Detroit'), ('America/Dominica', '(GMT-0400) America/Dominica'), ('America/Edmonton', '(GMT-0700) America/Edmonton'), ('America/Eirunepe', '(GMT-0500) America/Eirunepe'), ('America/El_Salvador', '(GMT-0600) America/El_Salvador'), ('America/Fort_Nelson', '(GMT-0700) America/Fort_Nelson'), ('America/Fortaleza', '(GMT-0300) America/Fortaleza'), ('America/Glace_Bay', '(GMT-0400) America/Glace_Bay'), ('America/Goose_Bay', '(GMT-0400) America/Goose_Bay'), ('America/Grand_Turk', '(GMT-0500) America/Grand_Turk'), ('America/Grenada', '(GMT-0400) America/Grenada'), ('America/Guadeloupe', '(GMT-0400) America/Guadeloupe'), ('America/Guatemala', '(GMT-0600) America/Guatemala'), ('America/Guayaquil', '(GMT-0500) America/Guayaquil'), ('America/Guyana', '(GMT-0400) America/Guyana'), ('America/Halifax', '(GMT-0400) America/Halifax'), ('America/Havana', '(GMT-0500) America/Havana'), ('America/Hermosillo', '(GMT-0700) America/Hermosillo'), ('America/Indiana/Indianapolis', '(GMT-0500) America/Indiana/Indianapolis'), ('America/Indiana/Knox', '(GMT-0600) America/Indiana/Knox'), ('America/Indiana/Marengo', '(GMT-0500) America/Indiana/Marengo'), ('America/Indiana/Petersburg', '(GMT-0500) America/Indiana/Petersburg'), ('America/Indiana/Tell_City', '(GMT-0600) America/Indiana/Tell_City'), ('America/Indiana/Vevay', '(GMT-0500) America/Indiana/Vevay'), ('America/Indiana/Vincennes', '(GMT-0500) America/Indiana/Vincennes'), ('America/Indiana/Winamac', '(GMT-0500) America/Indiana/Winamac'), ('America/Inuvik', '(GMT-0700) America/Inuvik'), ('America/Iqaluit', '(GMT-0500) America/Iqaluit'), ('America/Jamaica', '(GMT-0500) America/Jamaica'), ('America/Juneau', '(GMT-0900) America/Juneau'), ('America/Kentucky/Louisville', '(GMT-0500) America/Kentucky/Louisville'), ('America/Kentucky/Monticello', '(GMT-0500) America/Kentucky/Monticello'), ('America/Kralendijk', '(GMT-0400) America/Kralendijk'), ('America/La_Paz', '(GMT-0400) America/La_Paz'), ('America/Lima', '(GMT-0500) America/Lima'), ('America/Los_Angeles', '(GMT-0800) America/Los_Angeles'), ('America/Lower_Princes', '(GMT-0400) America/Lower_Princes'), ('America/Maceio', '(GMT-0300) America/Maceio'), ('America/Managua', '(GMT-0600) America/Managua'), ('America/Manaus', '(GMT-0400) America/Manaus'), ('America/Marigot', '(GMT-0400) America/Marigot'), ('America/Martinique', '(GMT-0400) America/Martinique'), ('America/Matamoros', '(GMT-0600) America/Matamoros'), ('America/Mazatlan', '(GMT-0700) America/Mazatlan'), ('America/Menominee', '(GMT-0600) America/Menominee'), ('America/Merida', '(GMT-0600) America/Merida'), ('America/Metlakatla', '(GMT-0900) America/Metlakatla'), ('America/Mexico_City', '(GMT-0600) America/Mexico_City'), ('America/Miquelon', '(GMT-0300) America/Miquelon'), ('America/Moncton', '(GMT-0400) America/Moncton'), ('America/Monterrey', '(GMT-0600) 
America/Monterrey'), ('America/Montevideo', '(GMT-0300) America/Montevideo'), ('America/Montserrat', '(GMT-0400) America/Montserrat'), ('America/Nassau', '(GMT-0500) America/Nassau'), ('America/New_York', '(GMT-0500) America/New_York'), ('America/Nipigon', '(GMT-0500) America/Nipigon'), ('America/Nome', '(GMT-0900) America/Nome'), ('America/Noronha', '(GMT-0200) America/Noronha'), ('America/North_Dakota/Beulah', '(GMT-0600) America/North_Dakota/Beulah'), ('America/North_Dakota/Center', '(GMT-0600) America/North_Dakota/Center'), ('America/North_Dakota/New_Salem', '(GMT-0600) America/North_Dakota/New_Salem'), ('America/Nuuk', '(GMT-0300) America/Nuuk'), ('America/Ojinaga', '(GMT-0700) America/Ojinaga'), ('America/Panama', '(GMT-0500) America/Panama'), ('America/Pangnirtung', '(GMT-0500) America/Pangnirtung'), ('America/Paramaribo', '(GMT-0300) America/Paramaribo'), ('America/Phoenix', '(GMT-0700) America/Phoenix'), ('America/Port-au-Prince', '(GMT-0500) America/Port-au-Prince'), ('America/Port_of_Spain', '(GMT-0400) America/Port_of_Spain'), ('America/Porto_Velho', '(GMT-0400) America/Porto_Velho'), ('America/Puerto_Rico', '(GMT-0400) America/Puerto_Rico'), ('America/Punta_Arenas', '(GMT-0300) America/Punta_Arenas'), ('America/Rainy_River', '(GMT-0600) America/Rainy_River'), ('America/Rankin_Inlet', '(GMT-0600) America/Rankin_Inlet'), ('America/Recife', '(GMT-0300) America/Recife'), ('America/Regina', '(GMT-0600) America/Regina'), ('America/Resolute', '(GMT-0600) America/Resolute'), ('America/Rio_Branco', '(GMT-0500) America/Rio_Branco'), ('America/Santarem', '(GMT-0300) America/Santarem'), ('America/Santiago', '(GMT-0300) America/Santiago'), ('America/Santo_Domingo', '(GMT-0400) America/Santo_Domingo'), ('America/Sao_Paulo', '(GMT-0300) America/Sao_Paulo'), ('America/Scoresbysund', '(GMT-0100) America/Scoresbysund'), ('America/Sitka', '(GMT-0900) America/Sitka'), ('America/St_Barthelemy', '(GMT-0400) America/St_Barthelemy'), ('America/St_Johns', '(GMT-0330) America/St_Johns'), ('America/St_Kitts', '(GMT-0400) America/St_Kitts'), ('America/St_Lucia', '(GMT-0400) America/St_Lucia'), ('America/St_Thomas', '(GMT-0400) America/St_Thomas'), ('America/St_Vincent', '(GMT-0400) America/St_Vincent'), ('America/Swift_Current', '(GMT-0600) America/Swift_Current'), ('America/Tegucigalpa', '(GMT-0600) America/Tegucigalpa'), ('America/Thule', '(GMT-0400) America/Thule'), ('America/Thunder_Bay', '(GMT-0500) America/Thunder_Bay'), ('America/Tijuana', '(GMT-0800) America/Tijuana'), ('America/Toronto', '(GMT-0500) America/Toronto'), ('America/Tortola', '(GMT-0400) America/Tortola'), ('America/Vancouver', '(GMT-0800) America/Vancouver'), ('America/Whitehorse', '(GMT-0700) America/Whitehorse'), ('America/Winnipeg', '(GMT-0600) America/Winnipeg'), ('America/Yakutat', '(GMT-0900) America/Yakutat'), ('America/Yellowknife', '(GMT-0700) America/Yellowknife'), ('Antarctica/Casey', '(GMT+1100) Antarctica/Casey'), ('Antarctica/Davis', '(GMT+0700) Antarctica/Davis'), ('Antarctica/DumontDUrville', '(GMT+1000) Antarctica/DumontDUrville'), ('Antarctica/Macquarie', '(GMT+1100) Antarctica/Macquarie'), ('Antarctica/Mawson', '(GMT+0500) Antarctica/Mawson'), ('Antarctica/McMurdo', '(GMT+1300) Antarctica/McMurdo'), ('Antarctica/Palmer', '(GMT-0300) Antarctica/Palmer'), ('Antarctica/Rothera', '(GMT-0300) Antarctica/Rothera'), ('Antarctica/Syowa', '(GMT+0300) Antarctica/Syowa'), ('Antarctica/Troll', '(GMT+0000) Antarctica/Troll'), ('Antarctica/Vostok', '(GMT+0600) Antarctica/Vostok'), ('Arctic/Longyearbyen', '(GMT+0100) 
Arctic/Longyearbyen'), ('Asia/Aden', '(GMT+0300) Asia/Aden'), ('Asia/Almaty', '(GMT+0600) Asia/Almaty'), ('Asia/Amman', '(GMT+0200) Asia/Amman'), ('Asia/Anadyr', '(GMT+1200) Asia/Anadyr'), ('Asia/Aqtau', '(GMT+0500) Asia/Aqtau'), ('Asia/Aqtobe', '(GMT+0500) Asia/Aqtobe'), ('Asia/Ashgabat', '(GMT+0500) Asia/Ashgabat'), ('Asia/Atyrau', '(GMT+0500) Asia/Atyrau'), ('Asia/Baghdad', '(GMT+0300) Asia/Baghdad'), ('Asia/Bahrain', '(GMT+0300) Asia/Bahrain'), ('Asia/Baku', '(GMT+0400) Asia/Baku'), ('Asia/Bangkok', '(GMT+0700) Asia/Bangkok'), ('Asia/Barnaul', '(GMT+0700) Asia/Barnaul'), ('Asia/Beirut', '(GMT+0200) Asia/Beirut'), ('Asia/Bishkek', '(GMT+0600) Asia/Bishkek'), ('Asia/Brunei', '(GMT+0800) Asia/Brunei'), ('Asia/Chita', '(GMT+0900) Asia/Chita'), ('Asia/Choibalsan', '(GMT+0800) Asia/Choibalsan'), ('Asia/Colombo', '(GMT+0530) Asia/Colombo'), ('Asia/Damascus', '(GMT+0200) Asia/Damascus'), ('Asia/Dhaka', '(GMT+0600) Asia/Dhaka'), ('Asia/Dili', '(GMT+0900) Asia/Dili'), ('Asia/Dubai', '(GMT+0400) Asia/Dubai'), ('Asia/Dushanbe', '(GMT+0500) Asia/Dushanbe'), ('Asia/Famagusta', '(GMT+0200) Asia/Famagusta'), ('Asia/Gaza', '(GMT+0200) Asia/Gaza'), ('Asia/Hebron', '(GMT+0200) Asia/Hebron'), ('Asia/Ho_Chi_Minh', '(GMT+0700) Asia/Ho_Chi_Minh'), ('Asia/Hong_Kong', '(GMT+0800) Asia/Hong_Kong'), ('Asia/Hovd', '(GMT+0700) Asia/Hovd'), ('Asia/Irkutsk', '(GMT+0800) Asia/Irkutsk'), ('Asia/Jakarta', '(GMT+0700) Asia/Jakarta'), ('Asia/Jayapura', '(GMT+0900) Asia/Jayapura'), ('Asia/Jerusalem', '(GMT+0200) Asia/Jerusalem'), ('Asia/Kabul', '(GMT+0430) Asia/Kabul'), ('Asia/Kamchatka', '(GMT+1200) Asia/Kamchatka'), ('Asia/Karachi', '(GMT+0500) Asia/Karachi'), ('Asia/Kathmandu', '(GMT+0545) Asia/Kathmandu'), ('Asia/Khandyga', '(GMT+0900) Asia/Khandyga'), ('Asia/Kolkata', '(GMT+0530) Asia/Kolkata'), ('Asia/Krasnoyarsk', '(GMT+0700) Asia/Krasnoyarsk'), ('Asia/Kuala_Lumpur', '(GMT+0800) Asia/Kuala_Lumpur'), ('Asia/Kuching', '(GMT+0800) Asia/Kuching'), ('Asia/Kuwait', '(GMT+0300) Asia/Kuwait'), ('Asia/Macau', '(GMT+0800) Asia/Macau'), ('Asia/Magadan', '(GMT+1100) Asia/Magadan'), ('Asia/Makassar', '(GMT+0800) Asia/Makassar'), ('Asia/Manila', '(GMT+0800) Asia/Manila'), ('Asia/Muscat', '(GMT+0400) Asia/Muscat'), ('Asia/Nicosia', '(GMT+0200) Asia/Nicosia'), ('Asia/Novokuznetsk', '(GMT+0700) Asia/Novokuznetsk'), ('Asia/Novosibirsk', '(GMT+0700) Asia/Novosibirsk'), ('Asia/Omsk', '(GMT+0600) Asia/Omsk'), ('Asia/Oral', '(GMT+0500) Asia/Oral'), ('Asia/Phnom_Penh', '(GMT+0700) Asia/Phnom_Penh'), ('Asia/Pontianak', '(GMT+0700) Asia/Pontianak'), ('Asia/Pyongyang', '(GMT+0900) Asia/Pyongyang'), ('Asia/Qatar', '(GMT+0300) Asia/Qatar'), ('Asia/Qostanay', '(GMT+0600) Asia/Qostanay'), ('Asia/Qyzylorda', '(GMT+0500) Asia/Qyzylorda'), ('Asia/Riyadh', '(GMT+0300) Asia/Riyadh'), ('Asia/Sakhalin', '(GMT+1100) Asia/Sakhalin'), ('Asia/Samarkand', '(GMT+0500) Asia/Samarkand'), ('Asia/Seoul', '(GMT+0900) Asia/Seoul'), ('Asia/Shanghai', '(GMT+0800) Asia/Shanghai'), ('Asia/Singapore', '(GMT+0800) Asia/Singapore'), ('Asia/Srednekolymsk', '(GMT+1100) Asia/Srednekolymsk'), ('Asia/Taipei', '(GMT+0800) Asia/Taipei'), ('Asia/Tashkent', '(GMT+0500) Asia/Tashkent'), ('Asia/Tbilisi', '(GMT+0400) Asia/Tbilisi'), ('Asia/Tehran', '(GMT+0330) Asia/Tehran'), ('Asia/Thimphu', '(GMT+0600) Asia/Thimphu'), ('Asia/Tokyo', '(GMT+0900) Asia/Tokyo'), ('Asia/Tomsk', '(GMT+0700) Asia/Tomsk'), ('Asia/Ulaanbaatar', '(GMT+0800) Asia/Ulaanbaatar'), ('Asia/Urumqi', '(GMT+0600) Asia/Urumqi'), ('Asia/Ust-Nera', '(GMT+1000) Asia/Ust-Nera'), ('Asia/Vientiane', '(GMT+0700) 
Asia/Vientiane'), ('Asia/Vladivostok', '(GMT+1000) Asia/Vladivostok'), ('Asia/Yakutsk', '(GMT+0900) Asia/Yakutsk'), ('Asia/Yangon', '(GMT+0630) Asia/Yangon'), ('Asia/Yekaterinburg', '(GMT+0500) Asia/Yekaterinburg'), ('Asia/Yerevan', '(GMT+0400) Asia/Yerevan'), ('Atlantic/Azores', '(GMT-0100) Atlantic/Azores'), ('Atlantic/Bermuda', '(GMT-0400) Atlantic/Bermuda'), ('Atlantic/Canary', '(GMT+0000) Atlantic/Canary'), ('Atlantic/Cape_Verde', '(GMT-0100) Atlantic/Cape_Verde'), ('Atlantic/Faroe', '(GMT+0000) Atlantic/Faroe'), ('Atlantic/Madeira', '(GMT+0000) Atlantic/Madeira'), ('Atlantic/Reykjavik', '(GMT+0000) Atlantic/Reykjavik'), ('Atlantic/South_Georgia', '(GMT-0200) Atlantic/South_Georgia'), ('Atlantic/St_Helena', '(GMT+0000) Atlantic/St_Helena'), ('Atlantic/Stanley', '(GMT-0300) Atlantic/Stanley'), ('Australia/Adelaide', '(GMT+1030) Australia/Adelaide'), ('Australia/Brisbane', '(GMT+1000) Australia/Brisbane'), ('Australia/Broken_Hill', '(GMT+1030) Australia/Broken_Hill'), ('Australia/Currie', '(GMT+1100) Australia/Currie'), ('Australia/Darwin', '(GMT+0930) Australia/Darwin'), ('Australia/Eucla', '(GMT+0845) Australia/Eucla'), ('Australia/Hobart', '(GMT+1100) Australia/Hobart'), ('Australia/Lindeman', '(GMT+1000) Australia/Lindeman'), ('Australia/Lord_Howe', '(GMT+1100) Australia/Lord_Howe'), ('Australia/Melbourne', '(GMT+1100) Australia/Melbourne'), ('Australia/Perth', '(GMT+0800) Australia/Perth'), ('Australia/Sydney', '(GMT+1100) Australia/Sydney'), ('Canada/Atlantic', '(GMT-0400) Canada/Atlantic'), ('Canada/Central', '(GMT-0600) Canada/Central'), ('Canada/Eastern', '(GMT-0500) Canada/Eastern'), ('Canada/Mountain', '(GMT-0700) Canada/Mountain'), ('Canada/Newfoundland', '(GMT-0330) Canada/Newfoundland'), ('Canada/Pacific', '(GMT-0800) Canada/Pacific'), ('Europe/Amsterdam', '(GMT+0100) Europe/Amsterdam'), ('Europe/Andorra', '(GMT+0100) Europe/Andorra'), ('Europe/Astrakhan', '(GMT+0400) Europe/Astrakhan'), ('Europe/Athens', '(GMT+0200) Europe/Athens'), ('Europe/Belgrade', '(GMT+0100) Europe/Belgrade'), ('Europe/Berlin', '(GMT+0100) Europe/Berlin'), ('Europe/Bratislava', '(GMT+0100) Europe/Bratislava'), ('Europe/Brussels', '(GMT+0100) Europe/Brussels'), ('Europe/Bucharest', '(GMT+0200) Europe/Bucharest'), ('Europe/Budapest', '(GMT+0100) Europe/Budapest'), ('Europe/Busingen', '(GMT+0100) Europe/Busingen'), ('Europe/Chisinau', '(GMT+0200) Europe/Chisinau'), ('Europe/Copenhagen', '(GMT+0100) Europe/Copenhagen'), ('Europe/Dublin', '(GMT+0000) Europe/Dublin'), ('Europe/Gibraltar', '(GMT+0100) Europe/Gibraltar'), ('Europe/Guernsey', '(GMT+0000) Europe/Guernsey'), ('Europe/Helsinki', '(GMT+0200) Europe/Helsinki'), ('Europe/Isle_of_Man', '(GMT+0000) Europe/Isle_of_Man'), ('Europe/Istanbul', '(GMT+0300) Europe/Istanbul'), ('Europe/Jersey', '(GMT+0000) Europe/Jersey'), ('Europe/Kaliningrad', '(GMT+0200) Europe/Kaliningrad'), ('Europe/Kiev', '(GMT+0200) Europe/Kiev'), ('Europe/Kirov', '(GMT+0300) Europe/Kirov'), ('Europe/Lisbon', '(GMT+0000) Europe/Lisbon'), ('Europe/Ljubljana', '(GMT+0100) Europe/Ljubljana'), ('Europe/London', '(GMT+0000) Europe/London'), ('Europe/Luxembourg', '(GMT+0100) Europe/Luxembourg'), ('Europe/Madrid', '(GMT+0100) Europe/Madrid'), ('Europe/Malta', '(GMT+0100) Europe/Malta'), ('Europe/Mariehamn', '(GMT+0200) Europe/Mariehamn'), ('Europe/Minsk', '(GMT+0300) Europe/Minsk'), ('Europe/Monaco', '(GMT+0100) Europe/Monaco'), ('Europe/Moscow', '(GMT+0300) Europe/Moscow'), ('Europe/Oslo', '(GMT+0100) Europe/Oslo'), ('Europe/Paris', '(GMT+0100) Europe/Paris'), ('Europe/Podgorica', 
'(GMT+0100) Europe/Podgorica'), ('Europe/Prague', '(GMT+0100) Europe/Prague'), ('Europe/Riga', '(GMT+0200) Europe/Riga'), ('Europe/Rome', '(GMT+0100) Europe/Rome'), ('Europe/Samara', '(GMT+0400) Europe/Samara'), ('Europe/San_Marino', '(GMT+0100) Europe/San_Marino'), ('Europe/Sarajevo', '(GMT+0100) Europe/Sarajevo'), ('Europe/Saratov', '(GMT+0400) Europe/Saratov'), ('Europe/Simferopol', '(GMT+0300) Europe/Simferopol'), ('Europe/Skopje', '(GMT+0100) Europe/Skopje'), ('Europe/Sofia', '(GMT+0200) Europe/Sofia'), ('Europe/Stockholm', '(GMT+0100) Europe/Stockholm'), ('Europe/Tallinn', '(GMT+0200) Europe/Tallinn'), ('Europe/Tirane', '(GMT+0100) Europe/Tirane'), ('Europe/Ulyanovsk', '(GMT+0400) Europe/Ulyanovsk'), ('Europe/Uzhgorod', '(GMT+0200) Europe/Uzhgorod'), ('Europe/Vaduz', '(GMT+0100) Europe/Vaduz'), ('Europe/Vatican', '(GMT+0100) Europe/Vatican'), ('Europe/Vienna', '(GMT+0100) Europe/Vienna'), ('Europe/Vilnius', '(GMT+0200) Europe/Vilnius'), ('Europe/Volgograd', '(GMT+0400) Europe/Volgograd'), ('Europe/Warsaw', '(GMT+0100) Europe/Warsaw'), ('Europe/Zagreb', '(GMT+0100) Europe/Zagreb'), ('Europe/Zaporozhye', '(GMT+0200) Europe/Zaporozhye'), ('Europe/Zurich', '(GMT+0100) Europe/Zurich'), ('GMT', '(GMT+0000) GMT'), ('Indian/Antananarivo', '(GMT+0300) Indian/Antananarivo'), ('Indian/Chagos', '(GMT+0600) Indian/Chagos'), ('Indian/Christmas', '(GMT+0700) Indian/Christmas'), ('Indian/Cocos', '(GMT+0630) Indian/Cocos'), ('Indian/Comoro', '(GMT+0300) Indian/Comoro'), ('Indian/Kerguelen', '(GMT+0500) Indian/Kerguelen'), ('Indian/Mahe', '(GMT+0400) Indian/Mahe'), ('Indian/Maldives', '(GMT+0500) Indian/Maldives'), ('Indian/Mauritius', '(GMT+0400) Indian/Mauritius'), ('Indian/Mayotte', '(GMT+0300) Indian/Mayotte'), ('Indian/Reunion', '(GMT+0400) Indian/Reunion'), ('Pacific/Apia', '(GMT+1400) Pacific/Apia'), ('Pacific/Auckland', '(GMT+1300) Pacific/Auckland'), ('Pacific/Bougainville', '(GMT+1100) Pacific/Bougainville'), ('Pacific/Chatham', '(GMT+1345) Pacific/Chatham'), ('Pacific/Chuuk', '(GMT+1000) Pacific/Chuuk'), ('Pacific/Easter', '(GMT-0500) Pacific/Easter'), ('Pacific/Efate', '(GMT+1100) Pacific/Efate'), ('Pacific/Enderbury', '(GMT+1300) Pacific/Enderbury'), ('Pacific/Fakaofo', '(GMT+1300) Pacific/Fakaofo'), ('Pacific/Fiji', '(GMT+1300) Pacific/Fiji'), ('Pacific/Funafuti', '(GMT+1200) Pacific/Funafuti'), ('Pacific/Galapagos', '(GMT-0600) Pacific/Galapagos'), ('Pacific/Gambier', '(GMT-0900) Pacific/Gambier'), ('Pacific/Guadalcanal', '(GMT+1100) Pacific/Guadalcanal'), ('Pacific/Guam', '(GMT+1000) Pacific/Guam'), ('Pacific/Honolulu', '(GMT-1000) Pacific/Honolulu'), ('Pacific/Kiritimati', '(GMT+1400) Pacific/Kiritimati'), ('Pacific/Kosrae', '(GMT+1100) Pacific/Kosrae'), ('Pacific/Kwajalein', '(GMT+1200) Pacific/Kwajalein'), ('Pacific/Majuro', '(GMT+1200) Pacific/Majuro'), ('Pacific/Marquesas', '(GMT-0930) Pacific/Marquesas'), ('Pacific/Midway', '(GMT-1100) Pacific/Midway'), ('Pacific/Nauru', '(GMT+1200) Pacific/Nauru'), ('Pacific/Niue', '(GMT-1100) Pacific/Niue'), ('Pacific/Norfolk', '(GMT+1200) Pacific/Norfolk'), ('Pacific/Noumea', '(GMT+1100) Pacific/Noumea'), ('Pacific/Pago_Pago', '(GMT-1100) Pacific/Pago_Pago'), ('Pacific/Palau', '(GMT+0900) Pacific/Palau'), ('Pacific/Pitcairn', '(GMT-0800) Pacific/Pitcairn'), ('Pacific/Pohnpei', '(GMT+1100) Pacific/Pohnpei'), ('Pacific/Port_Moresby', '(GMT+1000) Pacific/Port_Moresby'), ('Pacific/Rarotonga', '(GMT-1000) Pacific/Rarotonga'), ('Pacific/Saipan', '(GMT+1000) Pacific/Saipan'), ('Pacific/Tahiti', '(GMT-1000) Pacific/Tahiti'), ('Pacific/Tarawa', 
'(GMT+1200) Pacific/Tarawa'), ('Pacific/Tongatapu', '(GMT+1300) Pacific/Tongatapu'), ('Pacific/Wake', '(GMT+1200) Pacific/Wake'), ('Pacific/Wallis', '(GMT+1200) Pacific/Wallis'), ('US/Alaska', '(GMT-0900) US/Alaska'), ('US/Arizona', '(GMT-0700) US/Arizona'), ('US/Central', '(GMT-0600) US/Central'), ('US/Eastern', '(GMT-0500) US/Eastern'), ('US/Hawaii', '(GMT-1000) US/Hawaii'), ('US/Mountain', '(GMT-0700) US/Mountain'), ('US/Pacific', '(GMT-0800) US/Pacific'), ('UTC', '(GMT+0000) UTC')], default='America/New_York', max_length=100),
+            model_name="profile",
+            name="timezone",
+            field=vendor.timezones.fields.TimeZoneField(
+                choices=[
+                    ("Africa/Abidjan", "(GMT+0000) Africa/Abidjan"),
+                    ("Africa/Accra", "(GMT+0000) Africa/Accra"),
+                    ("Africa/Addis_Ababa", "(GMT+0300) Africa/Addis_Ababa"),
+                    ("Africa/Algiers", "(GMT+0100) Africa/Algiers"),
+                    ("Africa/Asmara", "(GMT+0300) Africa/Asmara"),
+                    ("Africa/Bamako", "(GMT+0000) Africa/Bamako"),
+                    ("Africa/Bangui", "(GMT+0100) Africa/Bangui"),
+                    ("Africa/Banjul", "(GMT+0000) Africa/Banjul"),
+                    ("Africa/Bissau", "(GMT+0000) Africa/Bissau"),
+                    ("Africa/Blantyre", "(GMT+0200) Africa/Blantyre"),
+                    ("Africa/Brazzaville", "(GMT+0100) Africa/Brazzaville"),
+                    ("Africa/Bujumbura", "(GMT+0200) Africa/Bujumbura"),
+                    ("Africa/Cairo", "(GMT+0200) Africa/Cairo"),
+                    ("Africa/Casablanca", "(GMT+0100) Africa/Casablanca"),
+                    ("Africa/Ceuta", "(GMT+0100) Africa/Ceuta"),
+                    ("Africa/Conakry", "(GMT+0000) Africa/Conakry"),
+                    ("Africa/Dakar", "(GMT+0000) Africa/Dakar"),
+                    ("Africa/Dar_es_Salaam", "(GMT+0300) Africa/Dar_es_Salaam"),
+                    ("Africa/Djibouti", "(GMT+0300) Africa/Djibouti"),
+                    ("Africa/Douala", "(GMT+0100) Africa/Douala"),
+                    ("Africa/El_Aaiun", "(GMT+0100) Africa/El_Aaiun"),
+                    ("Africa/Freetown", "(GMT+0000) Africa/Freetown"),
+                    ("Africa/Gaborone", "(GMT+0200) Africa/Gaborone"),
+                    ("Africa/Harare", "(GMT+0200) Africa/Harare"),
+                    ("Africa/Johannesburg", "(GMT+0200) Africa/Johannesburg"),
+                    ("Africa/Juba", "(GMT+0300) Africa/Juba"),
+                    ("Africa/Kampala", "(GMT+0300) Africa/Kampala"),
+                    ("Africa/Khartoum", "(GMT+0200) Africa/Khartoum"),
+                    ("Africa/Kigali", "(GMT+0200) Africa/Kigali"),
+                    ("Africa/Kinshasa", "(GMT+0100) Africa/Kinshasa"),
+                    ("Africa/Lagos", "(GMT+0100) Africa/Lagos"),
+                    ("Africa/Libreville", "(GMT+0100) Africa/Libreville"),
+                    ("Africa/Lome", "(GMT+0000) Africa/Lome"),
+                    ("Africa/Luanda", "(GMT+0100) Africa/Luanda"),
+                    ("Africa/Lubumbashi", "(GMT+0200) Africa/Lubumbashi"),
+                    ("Africa/Lusaka", "(GMT+0200) Africa/Lusaka"),
+                    ("Africa/Malabo", "(GMT+0100) Africa/Malabo"),
+                    ("Africa/Maputo", "(GMT+0200) Africa/Maputo"),
+                    ("Africa/Maseru", "(GMT+0200) Africa/Maseru"),
+                    ("Africa/Mbabane", "(GMT+0200) Africa/Mbabane"),
+                    ("Africa/Mogadishu", "(GMT+0300) Africa/Mogadishu"),
+                    ("Africa/Monrovia", "(GMT+0000) Africa/Monrovia"),
+                    ("Africa/Nairobi", "(GMT+0300) Africa/Nairobi"),
+                    ("Africa/Ndjamena", "(GMT+0100) Africa/Ndjamena"),
+                    ("Africa/Niamey", "(GMT+0100) Africa/Niamey"),
+                    ("Africa/Nouakchott", "(GMT+0000) Africa/Nouakchott"),
+                    ("Africa/Ouagadougou", "(GMT+0000) Africa/Ouagadougou"),
+                    ("Africa/Porto-Novo", "(GMT+0100) Africa/Porto-Novo"),
+                    ("Africa/Sao_Tome", "(GMT+0000) Africa/Sao_Tome"),
+                    ("Africa/Tripoli", "(GMT+0200) Africa/Tripoli"),
+                    ("Africa/Tunis", "(GMT+0100) Africa/Tunis"),
+                    ("Africa/Windhoek", "(GMT+0200) Africa/Windhoek"),
+                    ("America/Adak", "(GMT-1000) America/Adak"),
+                    ("America/Anchorage", "(GMT-0900) America/Anchorage"),
+                    ("America/Anguilla", "(GMT-0400) America/Anguilla"),
+                    ("America/Antigua", "(GMT-0400) America/Antigua"),
+                    ("America/Araguaina", "(GMT-0300) America/Araguaina"),
+                    ("America/Argentina/Buenos_Aires", "(GMT-0300) America/Argentina/Buenos_Aires"),
+                    ("America/Argentina/Catamarca", "(GMT-0300) America/Argentina/Catamarca"),
+                    ("America/Argentina/Cordoba", "(GMT-0300) America/Argentina/Cordoba"),
+                    ("America/Argentina/Jujuy", "(GMT-0300) America/Argentina/Jujuy"),
+                    ("America/Argentina/La_Rioja", "(GMT-0300) America/Argentina/La_Rioja"),
+                    ("America/Argentina/Mendoza", "(GMT-0300) America/Argentina/Mendoza"),
+                    ("America/Argentina/Rio_Gallegos", "(GMT-0300) America/Argentina/Rio_Gallegos"),
+                    ("America/Argentina/Salta", "(GMT-0300) America/Argentina/Salta"),
+                    ("America/Argentina/San_Juan", "(GMT-0300) America/Argentina/San_Juan"),
+                    ("America/Argentina/San_Luis", "(GMT-0300) America/Argentina/San_Luis"),
+                    ("America/Argentina/Tucuman", "(GMT-0300) America/Argentina/Tucuman"),
+                    ("America/Argentina/Ushuaia", "(GMT-0300) America/Argentina/Ushuaia"),
+                    ("America/Aruba", "(GMT-0400) America/Aruba"),
+                    ("America/Asuncion", "(GMT-0300) America/Asuncion"),
+                    ("America/Atikokan", "(GMT-0500) America/Atikokan"),
+                    ("America/Bahia", "(GMT-0300) America/Bahia"),
+                    ("America/Bahia_Banderas", "(GMT-0600) America/Bahia_Banderas"),
+                    ("America/Barbados", "(GMT-0400) America/Barbados"),
+                    ("America/Belem", "(GMT-0300) America/Belem"),
+                    ("America/Belize", "(GMT-0600) America/Belize"),
+                    ("America/Blanc-Sablon", "(GMT-0400) America/Blanc-Sablon"),
+                    ("America/Boa_Vista", "(GMT-0400) America/Boa_Vista"),
+                    ("America/Bogota", "(GMT-0500) America/Bogota"),
+                    ("America/Boise", "(GMT-0700) America/Boise"),
+                    ("America/Cambridge_Bay", "(GMT-0700) America/Cambridge_Bay"),
+                    ("America/Campo_Grande", "(GMT-0400) America/Campo_Grande"),
+                    ("America/Cancun", "(GMT-0500) America/Cancun"),
+                    ("America/Caracas", "(GMT-0400) America/Caracas"),
+                    ("America/Cayenne", "(GMT-0300) America/Cayenne"),
+                    ("America/Cayman", "(GMT-0500) America/Cayman"),
+                    ("America/Chicago", "(GMT-0600) America/Chicago"),
+                    ("America/Chihuahua", "(GMT-0700) America/Chihuahua"),
+                    ("America/Costa_Rica", "(GMT-0600) America/Costa_Rica"),
+                    ("America/Creston", "(GMT-0700) America/Creston"),
+                    ("America/Cuiaba", "(GMT-0400) America/Cuiaba"),
+                    ("America/Curacao", "(GMT-0400) America/Curacao"),
+                    ("America/Danmarkshavn", "(GMT+0000) America/Danmarkshavn"),
+                    ("America/Dawson", "(GMT-0700) America/Dawson"),
+                    ("America/Dawson_Creek", "(GMT-0700) America/Dawson_Creek"),
+                    ("America/Denver", "(GMT-0700) America/Denver"),
+                    ("America/Detroit", "(GMT-0500) America/Detroit"),
+                    ("America/Dominica", "(GMT-0400) America/Dominica"),
+                    ("America/Edmonton", "(GMT-0700) America/Edmonton"),
+                    ("America/Eirunepe", "(GMT-0500) America/Eirunepe"),
+                    ("America/El_Salvador", "(GMT-0600) America/El_Salvador"),
+                    ("America/Fort_Nelson", "(GMT-0700) America/Fort_Nelson"),
+                    ("America/Fortaleza", "(GMT-0300) America/Fortaleza"),
+                    ("America/Glace_Bay", "(GMT-0400) America/Glace_Bay"),
+                    ("America/Goose_Bay", "(GMT-0400) America/Goose_Bay"),
+                    ("America/Grand_Turk", "(GMT-0500) America/Grand_Turk"),
+                    ("America/Grenada", "(GMT-0400) America/Grenada"),
+                    ("America/Guadeloupe", "(GMT-0400) America/Guadeloupe"),
+                    ("America/Guatemala", "(GMT-0600) America/Guatemala"),
+                    ("America/Guayaquil", "(GMT-0500) America/Guayaquil"),
+                    ("America/Guyana", "(GMT-0400) America/Guyana"),
+                    ("America/Halifax", "(GMT-0400) America/Halifax"),
+                    ("America/Havana", "(GMT-0500) America/Havana"),
+                    ("America/Hermosillo", "(GMT-0700) America/Hermosillo"),
+                    ("America/Indiana/Indianapolis", "(GMT-0500) America/Indiana/Indianapolis"),
+                    ("America/Indiana/Knox", "(GMT-0600) America/Indiana/Knox"),
+                    ("America/Indiana/Marengo", "(GMT-0500) America/Indiana/Marengo"),
+                    ("America/Indiana/Petersburg", "(GMT-0500) America/Indiana/Petersburg"),
+                    ("America/Indiana/Tell_City", "(GMT-0600) America/Indiana/Tell_City"),
+                    ("America/Indiana/Vevay", "(GMT-0500) America/Indiana/Vevay"),
+                    ("America/Indiana/Vincennes", "(GMT-0500) America/Indiana/Vincennes"),
+                    ("America/Indiana/Winamac", "(GMT-0500) America/Indiana/Winamac"),
+                    ("America/Inuvik", "(GMT-0700) America/Inuvik"),
+                    ("America/Iqaluit", "(GMT-0500) America/Iqaluit"),
+                    ("America/Jamaica", "(GMT-0500) America/Jamaica"),
+                    ("America/Juneau", "(GMT-0900) America/Juneau"),
+                    ("America/Kentucky/Louisville", "(GMT-0500) America/Kentucky/Louisville"),
+                    ("America/Kentucky/Monticello", "(GMT-0500) America/Kentucky/Monticello"),
+                    ("America/Kralendijk", "(GMT-0400) America/Kralendijk"),
+                    ("America/La_Paz", "(GMT-0400) America/La_Paz"),
+                    ("America/Lima", "(GMT-0500) America/Lima"),
+                    ("America/Los_Angeles", "(GMT-0800) America/Los_Angeles"),
+                    ("America/Lower_Princes", "(GMT-0400) America/Lower_Princes"),
+                    ("America/Maceio", "(GMT-0300) America/Maceio"),
+                    ("America/Managua", "(GMT-0600) America/Managua"),
+                    ("America/Manaus", "(GMT-0400) America/Manaus"),
+                    ("America/Marigot", "(GMT-0400) America/Marigot"),
+                    ("America/Martinique", "(GMT-0400) America/Martinique"),
+                    ("America/Matamoros", "(GMT-0600) America/Matamoros"),
+                    ("America/Mazatlan", "(GMT-0700) America/Mazatlan"),
+                    ("America/Menominee", "(GMT-0600) America/Menominee"),
+                    ("America/Merida", "(GMT-0600) America/Merida"),
+                    ("America/Metlakatla", "(GMT-0900) America/Metlakatla"),
+                    ("America/Mexico_City", "(GMT-0600) America/Mexico_City"),
+                    ("America/Miquelon", "(GMT-0300) America/Miquelon"),
+                    ("America/Moncton", "(GMT-0400) America/Moncton"),
+                    ("America/Monterrey", "(GMT-0600) America/Monterrey"),
+                    ("America/Montevideo", "(GMT-0300) America/Montevideo"),
+                    ("America/Montserrat", "(GMT-0400) America/Montserrat"),
+                    ("America/Nassau", "(GMT-0500) America/Nassau"),
+                    ("America/New_York", "(GMT-0500) America/New_York"),
+                    ("America/Nipigon", "(GMT-0500) America/Nipigon"),
+                    ("America/Nome", "(GMT-0900) America/Nome"),
+                    ("America/Noronha", "(GMT-0200) America/Noronha"),
+                    ("America/North_Dakota/Beulah", "(GMT-0600) America/North_Dakota/Beulah"),
+                    ("America/North_Dakota/Center", "(GMT-0600) America/North_Dakota/Center"),
+                    ("America/North_Dakota/New_Salem", "(GMT-0600) America/North_Dakota/New_Salem"),
+                    ("America/Nuuk", "(GMT-0300) America/Nuuk"),
+                    ("America/Ojinaga", "(GMT-0700) America/Ojinaga"),
+                    ("America/Panama", "(GMT-0500) America/Panama"),
+                    ("America/Pangnirtung", "(GMT-0500) America/Pangnirtung"),
+                    ("America/Paramaribo", "(GMT-0300) America/Paramaribo"),
+                    ("America/Phoenix", "(GMT-0700) America/Phoenix"),
+                    ("America/Port-au-Prince", "(GMT-0500) America/Port-au-Prince"),
+                    ("America/Port_of_Spain", "(GMT-0400) America/Port_of_Spain"),
+                    ("America/Porto_Velho", "(GMT-0400) America/Porto_Velho"),
+                    ("America/Puerto_Rico", "(GMT-0400) America/Puerto_Rico"),
+                    ("America/Punta_Arenas", "(GMT-0300) America/Punta_Arenas"),
+                    ("America/Rainy_River", "(GMT-0600) America/Rainy_River"),
+                    ("America/Rankin_Inlet", "(GMT-0600) America/Rankin_Inlet"),
+                    ("America/Recife", "(GMT-0300) America/Recife"),
+                    ("America/Regina", "(GMT-0600) America/Regina"),
+                    ("America/Resolute", "(GMT-0600) America/Resolute"),
+                    ("America/Rio_Branco", "(GMT-0500) America/Rio_Branco"),
+                    ("America/Santarem", "(GMT-0300) America/Santarem"),
+                    ("America/Santiago", "(GMT-0300) America/Santiago"),
+                    ("America/Santo_Domingo", "(GMT-0400) America/Santo_Domingo"),
+                    ("America/Sao_Paulo", "(GMT-0300) America/Sao_Paulo"),
+                    ("America/Scoresbysund", "(GMT-0100) America/Scoresbysund"),
+                    ("America/Sitka", "(GMT-0900) America/Sitka"),
+                    ("America/St_Barthelemy", "(GMT-0400) America/St_Barthelemy"),
+                    ("America/St_Johns", "(GMT-0330) America/St_Johns"),
+                    ("America/St_Kitts", "(GMT-0400) America/St_Kitts"),
+                    ("America/St_Lucia", "(GMT-0400) America/St_Lucia"),
+                    ("America/St_Thomas", "(GMT-0400) America/St_Thomas"),
+                    ("America/St_Vincent", "(GMT-0400) America/St_Vincent"),
+                    ("America/Swift_Current", "(GMT-0600) America/Swift_Current"),
+                    ("America/Tegucigalpa", "(GMT-0600) America/Tegucigalpa"),
+                    ("America/Thule", "(GMT-0400) America/Thule"),
+                    ("America/Thunder_Bay", "(GMT-0500) America/Thunder_Bay"),
+                    ("America/Tijuana", "(GMT-0800) America/Tijuana"),
+                    ("America/Toronto", "(GMT-0500) America/Toronto"),
+                    ("America/Tortola", "(GMT-0400) America/Tortola"),
+                    ("America/Vancouver", "(GMT-0800) America/Vancouver"),
+                    ("America/Whitehorse", "(GMT-0700) America/Whitehorse"),
+                    ("America/Winnipeg", "(GMT-0600) America/Winnipeg"),
+                    ("America/Yakutat", "(GMT-0900) America/Yakutat"),
+                    ("America/Yellowknife", "(GMT-0700) America/Yellowknife"),
+                    ("Antarctica/Casey", "(GMT+1100) Antarctica/Casey"),
+                    ("Antarctica/Davis", "(GMT+0700) Antarctica/Davis"),
+                    ("Antarctica/DumontDUrville", "(GMT+1000) Antarctica/DumontDUrville"),
+                    ("Antarctica/Macquarie", "(GMT+1100) Antarctica/Macquarie"),
+                    ("Antarctica/Mawson", "(GMT+0500) Antarctica/Mawson"),
+                    ("Antarctica/McMurdo", "(GMT+1300) Antarctica/McMurdo"),
+                    ("Antarctica/Palmer", "(GMT-0300) Antarctica/Palmer"),
+                    ("Antarctica/Rothera", "(GMT-0300) Antarctica/Rothera"),
+                    ("Antarctica/Syowa", "(GMT+0300) Antarctica/Syowa"),
+                    ("Antarctica/Troll", "(GMT+0000) Antarctica/Troll"),
+                    ("Antarctica/Vostok", "(GMT+0600) Antarctica/Vostok"),
+                    ("Arctic/Longyearbyen", "(GMT+0100) Arctic/Longyearbyen"),
+                    ("Asia/Aden", "(GMT+0300) Asia/Aden"),
+                    ("Asia/Almaty", "(GMT+0600) Asia/Almaty"),
+                    ("Asia/Amman", "(GMT+0200) Asia/Amman"),
+                    ("Asia/Anadyr", "(GMT+1200) Asia/Anadyr"),
+                    ("Asia/Aqtau", "(GMT+0500) Asia/Aqtau"),
+                    ("Asia/Aqtobe", "(GMT+0500) Asia/Aqtobe"),
+                    ("Asia/Ashgabat", "(GMT+0500) Asia/Ashgabat"),
+                    ("Asia/Atyrau", "(GMT+0500) Asia/Atyrau"),
+                    ("Asia/Baghdad", "(GMT+0300) Asia/Baghdad"),
+                    ("Asia/Bahrain", "(GMT+0300) Asia/Bahrain"),
+                    ("Asia/Baku", "(GMT+0400) Asia/Baku"),
+                    ("Asia/Bangkok", "(GMT+0700) Asia/Bangkok"),
+                    ("Asia/Barnaul", "(GMT+0700) Asia/Barnaul"),
+                    ("Asia/Beirut", "(GMT+0200) Asia/Beirut"),
+                    ("Asia/Bishkek", "(GMT+0600) Asia/Bishkek"),
+                    ("Asia/Brunei", "(GMT+0800) Asia/Brunei"),
+                    ("Asia/Chita", "(GMT+0900) Asia/Chita"),
+                    ("Asia/Choibalsan", "(GMT+0800) Asia/Choibalsan"),
+                    ("Asia/Colombo", "(GMT+0530) Asia/Colombo"),
+                    ("Asia/Damascus", "(GMT+0200) Asia/Damascus"),
+                    ("Asia/Dhaka", "(GMT+0600) Asia/Dhaka"),
+                    ("Asia/Dili", "(GMT+0900) Asia/Dili"),
+                    ("Asia/Dubai", "(GMT+0400) Asia/Dubai"),
+                    ("Asia/Dushanbe", "(GMT+0500) Asia/Dushanbe"),
+                    ("Asia/Famagusta", "(GMT+0200) Asia/Famagusta"),
+                    ("Asia/Gaza", "(GMT+0200) Asia/Gaza"),
+                    ("Asia/Hebron", "(GMT+0200) Asia/Hebron"),
+                    ("Asia/Ho_Chi_Minh", "(GMT+0700) Asia/Ho_Chi_Minh"),
+                    ("Asia/Hong_Kong", "(GMT+0800) Asia/Hong_Kong"),
+                    ("Asia/Hovd", "(GMT+0700) Asia/Hovd"),
+                    ("Asia/Irkutsk", "(GMT+0800) Asia/Irkutsk"),
+                    ("Asia/Jakarta", "(GMT+0700) Asia/Jakarta"),
+                    ("Asia/Jayapura", "(GMT+0900) Asia/Jayapura"),
+                    ("Asia/Jerusalem", "(GMT+0200) Asia/Jerusalem"),
+                    ("Asia/Kabul", "(GMT+0430) Asia/Kabul"),
+                    ("Asia/Kamchatka", "(GMT+1200) Asia/Kamchatka"),
+                    ("Asia/Karachi", "(GMT+0500) Asia/Karachi"),
+                    ("Asia/Kathmandu", "(GMT+0545) Asia/Kathmandu"),
+                    ("Asia/Khandyga", "(GMT+0900) Asia/Khandyga"),
+                    ("Asia/Kolkata", "(GMT+0530) Asia/Kolkata"),
+                    ("Asia/Krasnoyarsk", "(GMT+0700) Asia/Krasnoyarsk"),
+                    ("Asia/Kuala_Lumpur", "(GMT+0800) Asia/Kuala_Lumpur"),
+                    ("Asia/Kuching", "(GMT+0800) Asia/Kuching"),
+                    ("Asia/Kuwait", "(GMT+0300) Asia/Kuwait"),
+                    ("Asia/Macau", "(GMT+0800) Asia/Macau"),
+                    ("Asia/Magadan", "(GMT+1100) Asia/Magadan"),
+                    ("Asia/Makassar", "(GMT+0800) Asia/Makassar"),
+                    ("Asia/Manila", "(GMT+0800) Asia/Manila"),
+                    ("Asia/Muscat", "(GMT+0400) Asia/Muscat"),
+                    ("Asia/Nicosia", "(GMT+0200) Asia/Nicosia"),
+                    ("Asia/Novokuznetsk", "(GMT+0700) Asia/Novokuznetsk"),
+                    ("Asia/Novosibirsk", "(GMT+0700) Asia/Novosibirsk"),
+                    ("Asia/Omsk", "(GMT+0600) Asia/Omsk"),
+                    ("Asia/Oral", "(GMT+0500) Asia/Oral"),
+                    ("Asia/Phnom_Penh", "(GMT+0700) Asia/Phnom_Penh"),
+                    ("Asia/Pontianak", "(GMT+0700) Asia/Pontianak"),
+                    ("Asia/Pyongyang", "(GMT+0900) Asia/Pyongyang"),
+                    ("Asia/Qatar", "(GMT+0300) Asia/Qatar"),
+                    ("Asia/Qostanay", "(GMT+0600) Asia/Qostanay"),
+                    ("Asia/Qyzylorda", "(GMT+0500) Asia/Qyzylorda"),
+                    ("Asia/Riyadh", "(GMT+0300) Asia/Riyadh"),
+                    ("Asia/Sakhalin", "(GMT+1100) Asia/Sakhalin"),
+                    ("Asia/Samarkand", "(GMT+0500) Asia/Samarkand"),
+                    ("Asia/Seoul", "(GMT+0900) Asia/Seoul"),
+                    ("Asia/Shanghai", "(GMT+0800) Asia/Shanghai"),
+                    ("Asia/Singapore", "(GMT+0800) Asia/Singapore"),
+                    ("Asia/Srednekolymsk", "(GMT+1100) Asia/Srednekolymsk"),
+                    ("Asia/Taipei", "(GMT+0800) Asia/Taipei"),
+                    ("Asia/Tashkent", "(GMT+0500) Asia/Tashkent"),
+                    ("Asia/Tbilisi", "(GMT+0400) Asia/Tbilisi"),
+                    ("Asia/Tehran", "(GMT+0330) Asia/Tehran"),
+                    ("Asia/Thimphu", "(GMT+0600) Asia/Thimphu"),
+                    ("Asia/Tokyo", "(GMT+0900) Asia/Tokyo"),
+                    ("Asia/Tomsk", "(GMT+0700) Asia/Tomsk"),
+                    ("Asia/Ulaanbaatar", "(GMT+0800) Asia/Ulaanbaatar"),
+                    ("Asia/Urumqi", "(GMT+0600) Asia/Urumqi"),
+                    ("Asia/Ust-Nera", "(GMT+1000) Asia/Ust-Nera"),
+                    ("Asia/Vientiane", "(GMT+0700) Asia/Vientiane"),
+                    ("Asia/Vladivostok", "(GMT+1000) Asia/Vladivostok"),
+                    ("Asia/Yakutsk", "(GMT+0900) Asia/Yakutsk"),
+                    ("Asia/Yangon", "(GMT+0630) Asia/Yangon"),
+                    ("Asia/Yekaterinburg", "(GMT+0500) Asia/Yekaterinburg"),
+                    ("Asia/Yerevan", "(GMT+0400) Asia/Yerevan"),
+                    ("Atlantic/Azores", "(GMT-0100) Atlantic/Azores"),
+                    ("Atlantic/Bermuda", "(GMT-0400) Atlantic/Bermuda"),
+                    ("Atlantic/Canary", "(GMT+0000) Atlantic/Canary"),
+                    ("Atlantic/Cape_Verde", "(GMT-0100) Atlantic/Cape_Verde"),
+                    ("Atlantic/Faroe", "(GMT+0000) Atlantic/Faroe"),
+                    ("Atlantic/Madeira", "(GMT+0000) Atlantic/Madeira"),
+                    ("Atlantic/Reykjavik", "(GMT+0000) Atlantic/Reykjavik"),
+                    ("Atlantic/South_Georgia", "(GMT-0200) Atlantic/South_Georgia"),
+                    ("Atlantic/St_Helena", "(GMT+0000) Atlantic/St_Helena"),
+                    ("Atlantic/Stanley", "(GMT-0300) Atlantic/Stanley"),
+                    ("Australia/Adelaide", "(GMT+1030) Australia/Adelaide"),
+                    ("Australia/Brisbane", "(GMT+1000) Australia/Brisbane"),
+                    ("Australia/Broken_Hill", "(GMT+1030) Australia/Broken_Hill"),
+                    ("Australia/Currie", "(GMT+1100) Australia/Currie"),
+                    ("Australia/Darwin", "(GMT+0930) Australia/Darwin"),
+                    ("Australia/Eucla", "(GMT+0845) Australia/Eucla"),
+                    ("Australia/Hobart", "(GMT+1100) Australia/Hobart"),
+                    ("Australia/Lindeman", "(GMT+1000) Australia/Lindeman"),
+                    ("Australia/Lord_Howe", "(GMT+1100) Australia/Lord_Howe"),
+                    ("Australia/Melbourne", "(GMT+1100) Australia/Melbourne"),
+                    ("Australia/Perth", "(GMT+0800) Australia/Perth"),
+                    ("Australia/Sydney", "(GMT+1100) Australia/Sydney"),
+                    ("Canada/Atlantic", "(GMT-0400) Canada/Atlantic"),
+                    ("Canada/Central", "(GMT-0600) Canada/Central"),
+                    ("Canada/Eastern", "(GMT-0500) Canada/Eastern"),
+                    ("Canada/Mountain", "(GMT-0700) Canada/Mountain"),
+                    ("Canada/Newfoundland", "(GMT-0330) Canada/Newfoundland"),
+                    ("Canada/Pacific", "(GMT-0800) Canada/Pacific"),
+                    ("Europe/Amsterdam", "(GMT+0100) Europe/Amsterdam"),
+                    ("Europe/Andorra", "(GMT+0100) Europe/Andorra"),
+                    ("Europe/Astrakhan", "(GMT+0400) Europe/Astrakhan"),
+                    ("Europe/Athens", "(GMT+0200) Europe/Athens"),
+                    ("Europe/Belgrade", "(GMT+0100) Europe/Belgrade"),
+                    ("Europe/Berlin", "(GMT+0100) Europe/Berlin"),
+                    ("Europe/Bratislava", "(GMT+0100) Europe/Bratislava"),
+                    ("Europe/Brussels", "(GMT+0100) Europe/Brussels"),
+                    ("Europe/Bucharest", "(GMT+0200) Europe/Bucharest"),
+                    ("Europe/Budapest", "(GMT+0100) Europe/Budapest"),
+                    ("Europe/Busingen", "(GMT+0100) Europe/Busingen"),
+                    ("Europe/Chisinau", "(GMT+0200) Europe/Chisinau"),
+                    ("Europe/Copenhagen", "(GMT+0100) Europe/Copenhagen"),
+                    ("Europe/Dublin", "(GMT+0000) Europe/Dublin"),
+                    ("Europe/Gibraltar", "(GMT+0100) Europe/Gibraltar"),
+                    ("Europe/Guernsey", "(GMT+0000) Europe/Guernsey"),
+                    ("Europe/Helsinki", "(GMT+0200) Europe/Helsinki"),
+                    ("Europe/Isle_of_Man", "(GMT+0000) Europe/Isle_of_Man"),
+                    ("Europe/Istanbul", "(GMT+0300) Europe/Istanbul"),
+                    ("Europe/Jersey", "(GMT+0000) Europe/Jersey"),
+                    ("Europe/Kaliningrad", "(GMT+0200) Europe/Kaliningrad"),
+                    ("Europe/Kiev", "(GMT+0200) Europe/Kiev"),
+                    ("Europe/Kirov", "(GMT+0300) Europe/Kirov"),
+                    ("Europe/Lisbon", "(GMT+0000) Europe/Lisbon"),
+                    ("Europe/Ljubljana", "(GMT+0100) Europe/Ljubljana"),
+                    ("Europe/London", "(GMT+0000) Europe/London"),
+                    ("Europe/Luxembourg", "(GMT+0100) Europe/Luxembourg"),
+                    ("Europe/Madrid", "(GMT+0100) Europe/Madrid"),
+                    ("Europe/Malta", "(GMT+0100) Europe/Malta"),
+                    ("Europe/Mariehamn", "(GMT+0200) Europe/Mariehamn"),
+                    ("Europe/Minsk", "(GMT+0300) Europe/Minsk"),
+                    ("Europe/Monaco", "(GMT+0100) Europe/Monaco"),
+                    ("Europe/Moscow", "(GMT+0300) Europe/Moscow"),
+                    ("Europe/Oslo", "(GMT+0100) Europe/Oslo"),
+                    ("Europe/Paris", "(GMT+0100) Europe/Paris"),
+                    ("Europe/Podgorica", "(GMT+0100) Europe/Podgorica"),
+                    ("Europe/Prague", "(GMT+0100) Europe/Prague"),
+                    ("Europe/Riga", "(GMT+0200) Europe/Riga"),
+                    ("Europe/Rome", "(GMT+0100) Europe/Rome"),
+                    ("Europe/Samara", "(GMT+0400) Europe/Samara"),
+                    ("Europe/San_Marino", "(GMT+0100) Europe/San_Marino"),
+                    ("Europe/Sarajevo", "(GMT+0100) Europe/Sarajevo"),
+                    ("Europe/Saratov", "(GMT+0400) Europe/Saratov"),
+                    ("Europe/Simferopol", "(GMT+0300) Europe/Simferopol"),
+                    ("Europe/Skopje", "(GMT+0100) Europe/Skopje"),
+                    ("Europe/Sofia", "(GMT+0200) Europe/Sofia"),
+                    ("Europe/Stockholm", "(GMT+0100) Europe/Stockholm"),
+                    ("Europe/Tallinn", "(GMT+0200) Europe/Tallinn"),
+                    ("Europe/Tirane", "(GMT+0100) Europe/Tirane"),
+                    ("Europe/Ulyanovsk", "(GMT+0400) Europe/Ulyanovsk"),
+                    ("Europe/Uzhgorod", "(GMT+0200) Europe/Uzhgorod"),
+                    ("Europe/Vaduz", "(GMT+0100) Europe/Vaduz"),
+                    ("Europe/Vatican", "(GMT+0100) Europe/Vatican"),
+                    ("Europe/Vienna", "(GMT+0100) Europe/Vienna"),
+                    ("Europe/Vilnius", "(GMT+0200) Europe/Vilnius"),
+                    ("Europe/Volgograd", "(GMT+0400) Europe/Volgograd"),
+                    ("Europe/Warsaw", "(GMT+0100) Europe/Warsaw"),
+                    ("Europe/Zagreb", "(GMT+0100) Europe/Zagreb"),
+                    ("Europe/Zaporozhye", "(GMT+0200) Europe/Zaporozhye"),
+                    ("Europe/Zurich", "(GMT+0100) Europe/Zurich"),
+                    ("GMT", "(GMT+0000) GMT"),
+                    ("Indian/Antananarivo", "(GMT+0300) Indian/Antananarivo"),
+                    ("Indian/Chagos", "(GMT+0600) Indian/Chagos"),
+                    ("Indian/Christmas", "(GMT+0700) Indian/Christmas"),
+                    ("Indian/Cocos", "(GMT+0630) Indian/Cocos"),
+                    ("Indian/Comoro", "(GMT+0300) Indian/Comoro"),
+                    ("Indian/Kerguelen", "(GMT+0500) Indian/Kerguelen"),
+                    ("Indian/Mahe", "(GMT+0400) Indian/Mahe"),
+                    ("Indian/Maldives", "(GMT+0500) Indian/Maldives"),
+                    ("Indian/Mauritius", "(GMT+0400) Indian/Mauritius"),
+                    ("Indian/Mayotte", "(GMT+0300) Indian/Mayotte"),
+                    ("Indian/Reunion", "(GMT+0400) Indian/Reunion"),
+                    ("Pacific/Apia", "(GMT+1400) Pacific/Apia"),
+                    ("Pacific/Auckland", "(GMT+1300) Pacific/Auckland"),
+                    ("Pacific/Bougainville", "(GMT+1100) Pacific/Bougainville"),
+                    ("Pacific/Chatham", "(GMT+1345) Pacific/Chatham"),
+                    ("Pacific/Chuuk", "(GMT+1000) Pacific/Chuuk"),
+                    ("Pacific/Easter", "(GMT-0500) Pacific/Easter"),
+                    ("Pacific/Efate", "(GMT+1100) Pacific/Efate"),
+                    ("Pacific/Enderbury", "(GMT+1300) Pacific/Enderbury"),
+                    ("Pacific/Fakaofo", "(GMT+1300) Pacific/Fakaofo"),
+                    ("Pacific/Fiji", "(GMT+1300) Pacific/Fiji"),
+                    ("Pacific/Funafuti", "(GMT+1200) Pacific/Funafuti"),
+                    ("Pacific/Galapagos", "(GMT-0600) Pacific/Galapagos"),
+                    ("Pacific/Gambier", "(GMT-0900) Pacific/Gambier"),
+                    ("Pacific/Guadalcanal", "(GMT+1100) Pacific/Guadalcanal"),
+                    ("Pacific/Guam", "(GMT+1000) Pacific/Guam"),
+                    ("Pacific/Honolulu", "(GMT-1000) Pacific/Honolulu"),
+                    ("Pacific/Kiritimati", "(GMT+1400) Pacific/Kiritimati"),
+                    ("Pacific/Kosrae", "(GMT+1100) Pacific/Kosrae"),
+                    ("Pacific/Kwajalein", "(GMT+1200) Pacific/Kwajalein"),
+                    ("Pacific/Majuro", "(GMT+1200) Pacific/Majuro"),
+                    ("Pacific/Marquesas", "(GMT-0930) Pacific/Marquesas"),
+                    ("Pacific/Midway", "(GMT-1100) Pacific/Midway"),
+                    ("Pacific/Nauru", "(GMT+1200) Pacific/Nauru"),
+                    ("Pacific/Niue", "(GMT-1100) Pacific/Niue"),
+                    ("Pacific/Norfolk", "(GMT+1200) Pacific/Norfolk"),
+                    ("Pacific/Noumea", "(GMT+1100) Pacific/Noumea"),
+                    ("Pacific/Pago_Pago", "(GMT-1100) Pacific/Pago_Pago"),
+                    ("Pacific/Palau", "(GMT+0900) Pacific/Palau"),
+                    ("Pacific/Pitcairn", "(GMT-0800) Pacific/Pitcairn"),
+                    ("Pacific/Pohnpei", "(GMT+1100) Pacific/Pohnpei"),
+                    ("Pacific/Port_Moresby", "(GMT+1000) Pacific/Port_Moresby"),
+                    ("Pacific/Rarotonga", "(GMT-1000) Pacific/Rarotonga"),
+                    ("Pacific/Saipan", "(GMT+1000) Pacific/Saipan"),
+                    ("Pacific/Tahiti", "(GMT-1000) Pacific/Tahiti"),
+                    ("Pacific/Tarawa", "(GMT+1200) Pacific/Tarawa"),
+                    ("Pacific/Tongatapu", "(GMT+1300) Pacific/Tongatapu"),
+                    ("Pacific/Wake", "(GMT+1200) Pacific/Wake"),
+                    ("Pacific/Wallis", "(GMT+1200) Pacific/Wallis"),
+                    ("US/Alaska", "(GMT-0900) US/Alaska"),
+                    ("US/Arizona", "(GMT-0700) US/Arizona"),
+                    ("US/Central", "(GMT-0600) US/Central"),
+                    ("US/Eastern", "(GMT-0500) US/Eastern"),
+                    ("US/Hawaii", "(GMT-1000) US/Hawaii"),
+                    ("US/Mountain", "(GMT-0700) US/Mountain"),
+                    ("US/Pacific", "(GMT-0800) US/Pacific"),
+                    ("UTC", "(GMT+0000) UTC"),
+                ],
+                default="America/New_York",
+                max_length=100,
+            ),
         ),
     ]
diff --git a/apps/profile/migrations/0005_profile_is_archive.py b/apps/profile/migrations/0005_profile_is_archive.py
index 0b87acc915..66f2e1ceab 100644
--- a/apps/profile/migrations/0005_profile_is_archive.py
+++ b/apps/profile/migrations/0005_profile_is_archive.py
@@ -4,15 +4,14 @@
 class Migration(migrations.Migration):
-
     dependencies = [
-        ('profile', '0004_auto_20220110_2106'),
+        ("profile", "0004_auto_20220110_2106"),
     ]

     operations = [
         migrations.AddField(
-            model_name='profile',
-            name='is_archive',
+            model_name="profile",
+            name="is_archive",
             field=models.BooleanField(blank=True, default=False, null=True),
         ),
     ]
diff --git a/apps/profile/migrations/0006_profile_days_of_unread.py b/apps/profile/migrations/0006_profile_days_of_unread.py
index d7740bbfde..240bcce63a 100644
--- a/apps/profile/migrations/0006_profile_days_of_unread.py
+++ b/apps/profile/migrations/0006_profile_days_of_unread.py
@@ -4,15 +4,14 @@
 class Migration(migrations.Migration):
-
     dependencies = [
-        ('profile', '0005_profile_is_archive'),
+        ("profile", "0005_profile_is_archive"),
     ]

     operations = [
         migrations.AddField(
-            model_name='profile',
-            name='days_of_unread',
+            model_name="profile",
+            name="days_of_unread",
             field=models.IntegerField(default=30, blank=True, null=True),
         ),
     ]
diff --git a/apps/profile/migrations/0007_auto_20220125_2108.py b/apps/profile/migrations/0007_auto_20220125_2108.py
index 624d89d85f..a456b15e32 100644
--- a/apps/profile/migrations/0007_auto_20220125_2108.py
+++ b/apps/profile/migrations/0007_auto_20220125_2108.py
@@ -5,20 +5,464 @@
 class Migration(migrations.Migration):
-
     dependencies = [
-        ('profile', '0006_profile_days_of_unread'),
+        ("profile", "0006_profile_days_of_unread"),
     ]

     operations = [
         migrations.AddField(
-            model_name='profile',
-            name='premium_renewal',
+            model_name="profile",
+            name="premium_renewal",
             field=models.BooleanField(blank=True, default=False, null=True),
         ),
         migrations.AlterField(
-            model_name='profile',
-            name='timezone',
-            field=vendor.timezones.fields.TimeZoneField(choices=[('Africa/Abidjan', '(GMT+0000) Africa/Abidjan'), ('Africa/Accra', '(GMT+0000) Africa/Accra'), ('Africa/Addis_Ababa', '(GMT+0300) Africa/Addis_Ababa'), ('Africa/Algiers', '(GMT+0100) Africa/Algiers'), ('Africa/Asmara', '(GMT+0300) Africa/Asmara'), ('Africa/Bamako', '(GMT+0000) Africa/Bamako'), ('Africa/Bangui', '(GMT+0100) Africa/Bangui'), ('Africa/Banjul', '(GMT+0000) Africa/Banjul'), ('Africa/Bissau', '(GMT+0000) Africa/Bissau'), ('Africa/Blantyre', '(GMT+0200) Africa/Blantyre'), ('Africa/Brazzaville', '(GMT+0100) Africa/Brazzaville'), ('Africa/Bujumbura', '(GMT+0200) Africa/Bujumbura'), ('Africa/Cairo', '(GMT+0200) Africa/Cairo'), ('Africa/Casablanca', '(GMT+0100) Africa/Casablanca'), ('Africa/Ceuta', '(GMT+0100) Africa/Ceuta'), ('Africa/Conakry', '(GMT+0000) Africa/Conakry'), ('Africa/Dakar', '(GMT+0000) Africa/Dakar'), ('Africa/Dar_es_Salaam', '(GMT+0300) Africa/Dar_es_Salaam'), ('Africa/Djibouti', '(GMT+0300) Africa/Djibouti'), ('Africa/Douala', '(GMT+0100) Africa/Douala'), ('Africa/El_Aaiun', '(GMT+0100) Africa/El_Aaiun'), ('Africa/Freetown', '(GMT+0000) Africa/Freetown'), ('Africa/Gaborone', '(GMT+0200) Africa/Gaborone'), ('Africa/Harare', '(GMT+0200) Africa/Harare'), ('Africa/Johannesburg', '(GMT+0200) Africa/Johannesburg'), ('Africa/Juba', '(GMT+0300) Africa/Juba'), ('Africa/Kampala', '(GMT+0300) Africa/Kampala'), ('Africa/Khartoum', '(GMT+0200) Africa/Khartoum'), ('Africa/Kigali', '(GMT+0200) Africa/Kigali'), ('Africa/Kinshasa', '(GMT+0100) Africa/Kinshasa'), ('Africa/Lagos', '(GMT+0100) Africa/Lagos'), ('Africa/Libreville', '(GMT+0100) Africa/Libreville'), ('Africa/Lome', '(GMT+0000) Africa/Lome'), ('Africa/Luanda', '(GMT+0100) Africa/Luanda'), ('Africa/Lubumbashi', '(GMT+0200) Africa/Lubumbashi'), ('Africa/Lusaka', '(GMT+0200) Africa/Lusaka'), ('Africa/Malabo', '(GMT+0100) Africa/Malabo'), ('Africa/Maputo', '(GMT+0200) Africa/Maputo'), ('Africa/Maseru', '(GMT+0200) Africa/Maseru'), ('Africa/Mbabane', '(GMT+0200) Africa/Mbabane'), ('Africa/Mogadishu', '(GMT+0300) Africa/Mogadishu'),
('Africa/Monrovia', '(GMT+0000) Africa/Monrovia'), ('Africa/Nairobi', '(GMT+0300) Africa/Nairobi'), ('Africa/Ndjamena', '(GMT+0100) Africa/Ndjamena'), ('Africa/Niamey', '(GMT+0100) Africa/Niamey'), ('Africa/Nouakchott', '(GMT+0000) Africa/Nouakchott'), ('Africa/Ouagadougou', '(GMT+0000) Africa/Ouagadougou'), ('Africa/Porto-Novo', '(GMT+0100) Africa/Porto-Novo'), ('Africa/Sao_Tome', '(GMT+0000) Africa/Sao_Tome'), ('Africa/Tripoli', '(GMT+0200) Africa/Tripoli'), ('Africa/Tunis', '(GMT+0100) Africa/Tunis'), ('Africa/Windhoek', '(GMT+0200) Africa/Windhoek'), ('America/Adak', '(GMT-1000) America/Adak'), ('America/Anchorage', '(GMT-0900) America/Anchorage'), ('America/Anguilla', '(GMT-0400) America/Anguilla'), ('America/Antigua', '(GMT-0400) America/Antigua'), ('America/Araguaina', '(GMT-0300) America/Araguaina'), ('America/Argentina/Buenos_Aires', '(GMT-0300) America/Argentina/Buenos_Aires'), ('America/Argentina/Catamarca', '(GMT-0300) America/Argentina/Catamarca'), ('America/Argentina/Cordoba', '(GMT-0300) America/Argentina/Cordoba'), ('America/Argentina/Jujuy', '(GMT-0300) America/Argentina/Jujuy'), ('America/Argentina/La_Rioja', '(GMT-0300) America/Argentina/La_Rioja'), ('America/Argentina/Mendoza', '(GMT-0300) America/Argentina/Mendoza'), ('America/Argentina/Rio_Gallegos', '(GMT-0300) America/Argentina/Rio_Gallegos'), ('America/Argentina/Salta', '(GMT-0300) America/Argentina/Salta'), ('America/Argentina/San_Juan', '(GMT-0300) America/Argentina/San_Juan'), ('America/Argentina/San_Luis', '(GMT-0300) America/Argentina/San_Luis'), ('America/Argentina/Tucuman', '(GMT-0300) America/Argentina/Tucuman'), ('America/Argentina/Ushuaia', '(GMT-0300) America/Argentina/Ushuaia'), ('America/Aruba', '(GMT-0400) America/Aruba'), ('America/Asuncion', '(GMT-0300) America/Asuncion'), ('America/Atikokan', '(GMT-0500) America/Atikokan'), ('America/Bahia', '(GMT-0300) America/Bahia'), ('America/Bahia_Banderas', '(GMT-0600) America/Bahia_Banderas'), ('America/Barbados', '(GMT-0400) America/Barbados'), ('America/Belem', '(GMT-0300) America/Belem'), ('America/Belize', '(GMT-0600) America/Belize'), ('America/Blanc-Sablon', '(GMT-0400) America/Blanc-Sablon'), ('America/Boa_Vista', '(GMT-0400) America/Boa_Vista'), ('America/Bogota', '(GMT-0500) America/Bogota'), ('America/Boise', '(GMT-0700) America/Boise'), ('America/Cambridge_Bay', '(GMT-0700) America/Cambridge_Bay'), ('America/Campo_Grande', '(GMT-0400) America/Campo_Grande'), ('America/Cancun', '(GMT-0500) America/Cancun'), ('America/Caracas', '(GMT-0400) America/Caracas'), ('America/Cayenne', '(GMT-0300) America/Cayenne'), ('America/Cayman', '(GMT-0500) America/Cayman'), ('America/Chicago', '(GMT-0600) America/Chicago'), ('America/Chihuahua', '(GMT-0700) America/Chihuahua'), ('America/Costa_Rica', '(GMT-0600) America/Costa_Rica'), ('America/Creston', '(GMT-0700) America/Creston'), ('America/Cuiaba', '(GMT-0400) America/Cuiaba'), ('America/Curacao', '(GMT-0400) America/Curacao'), ('America/Danmarkshavn', '(GMT+0000) America/Danmarkshavn'), ('America/Dawson', '(GMT-0700) America/Dawson'), ('America/Dawson_Creek', '(GMT-0700) America/Dawson_Creek'), ('America/Denver', '(GMT-0700) America/Denver'), ('America/Detroit', '(GMT-0500) America/Detroit'), ('America/Dominica', '(GMT-0400) America/Dominica'), ('America/Edmonton', '(GMT-0700) America/Edmonton'), ('America/Eirunepe', '(GMT-0500) America/Eirunepe'), ('America/El_Salvador', '(GMT-0600) America/El_Salvador'), ('America/Fort_Nelson', '(GMT-0700) America/Fort_Nelson'), ('America/Fortaleza', '(GMT-0300) 
America/Fortaleza'), ('America/Glace_Bay', '(GMT-0400) America/Glace_Bay'), ('America/Goose_Bay', '(GMT-0400) America/Goose_Bay'), ('America/Grand_Turk', '(GMT-0500) America/Grand_Turk'), ('America/Grenada', '(GMT-0400) America/Grenada'), ('America/Guadeloupe', '(GMT-0400) America/Guadeloupe'), ('America/Guatemala', '(GMT-0600) America/Guatemala'), ('America/Guayaquil', '(GMT-0500) America/Guayaquil'), ('America/Guyana', '(GMT-0400) America/Guyana'), ('America/Halifax', '(GMT-0400) America/Halifax'), ('America/Havana', '(GMT-0500) America/Havana'), ('America/Hermosillo', '(GMT-0700) America/Hermosillo'), ('America/Indiana/Indianapolis', '(GMT-0500) America/Indiana/Indianapolis'), ('America/Indiana/Knox', '(GMT-0600) America/Indiana/Knox'), ('America/Indiana/Marengo', '(GMT-0500) America/Indiana/Marengo'), ('America/Indiana/Petersburg', '(GMT-0500) America/Indiana/Petersburg'), ('America/Indiana/Tell_City', '(GMT-0600) America/Indiana/Tell_City'), ('America/Indiana/Vevay', '(GMT-0500) America/Indiana/Vevay'), ('America/Indiana/Vincennes', '(GMT-0500) America/Indiana/Vincennes'), ('America/Indiana/Winamac', '(GMT-0500) America/Indiana/Winamac'), ('America/Inuvik', '(GMT-0700) America/Inuvik'), ('America/Iqaluit', '(GMT-0500) America/Iqaluit'), ('America/Jamaica', '(GMT-0500) America/Jamaica'), ('America/Juneau', '(GMT-0900) America/Juneau'), ('America/Kentucky/Louisville', '(GMT-0500) America/Kentucky/Louisville'), ('America/Kentucky/Monticello', '(GMT-0500) America/Kentucky/Monticello'), ('America/Kralendijk', '(GMT-0400) America/Kralendijk'), ('America/La_Paz', '(GMT-0400) America/La_Paz'), ('America/Lima', '(GMT-0500) America/Lima'), ('America/Los_Angeles', '(GMT-0800) America/Los_Angeles'), ('America/Lower_Princes', '(GMT-0400) America/Lower_Princes'), ('America/Maceio', '(GMT-0300) America/Maceio'), ('America/Managua', '(GMT-0600) America/Managua'), ('America/Manaus', '(GMT-0400) America/Manaus'), ('America/Marigot', '(GMT-0400) America/Marigot'), ('America/Martinique', '(GMT-0400) America/Martinique'), ('America/Matamoros', '(GMT-0600) America/Matamoros'), ('America/Mazatlan', '(GMT-0700) America/Mazatlan'), ('America/Menominee', '(GMT-0600) America/Menominee'), ('America/Merida', '(GMT-0600) America/Merida'), ('America/Metlakatla', '(GMT-0900) America/Metlakatla'), ('America/Mexico_City', '(GMT-0600) America/Mexico_City'), ('America/Miquelon', '(GMT-0300) America/Miquelon'), ('America/Moncton', '(GMT-0400) America/Moncton'), ('America/Monterrey', '(GMT-0600) America/Monterrey'), ('America/Montevideo', '(GMT-0300) America/Montevideo'), ('America/Montserrat', '(GMT-0400) America/Montserrat'), ('America/Nassau', '(GMT-0500) America/Nassau'), ('America/New_York', '(GMT-0500) America/New_York'), ('America/Nipigon', '(GMT-0500) America/Nipigon'), ('America/Nome', '(GMT-0900) America/Nome'), ('America/Noronha', '(GMT-0200) America/Noronha'), ('America/North_Dakota/Beulah', '(GMT-0600) America/North_Dakota/Beulah'), ('America/North_Dakota/Center', '(GMT-0600) America/North_Dakota/Center'), ('America/North_Dakota/New_Salem', '(GMT-0600) America/North_Dakota/New_Salem'), ('America/Nuuk', '(GMT-0300) America/Nuuk'), ('America/Ojinaga', '(GMT-0700) America/Ojinaga'), ('America/Panama', '(GMT-0500) America/Panama'), ('America/Pangnirtung', '(GMT-0500) America/Pangnirtung'), ('America/Paramaribo', '(GMT-0300) America/Paramaribo'), ('America/Phoenix', '(GMT-0700) America/Phoenix'), ('America/Port-au-Prince', '(GMT-0500) America/Port-au-Prince'), ('America/Port_of_Spain', '(GMT-0400) 
America/Port_of_Spain'), ('America/Porto_Velho', '(GMT-0400) America/Porto_Velho'), ('America/Puerto_Rico', '(GMT-0400) America/Puerto_Rico'), ('America/Punta_Arenas', '(GMT-0300) America/Punta_Arenas'), ('America/Rainy_River', '(GMT-0600) America/Rainy_River'), ('America/Rankin_Inlet', '(GMT-0600) America/Rankin_Inlet'), ('America/Recife', '(GMT-0300) America/Recife'), ('America/Regina', '(GMT-0600) America/Regina'), ('America/Resolute', '(GMT-0600) America/Resolute'), ('America/Rio_Branco', '(GMT-0500) America/Rio_Branco'), ('America/Santarem', '(GMT-0300) America/Santarem'), ('America/Santiago', '(GMT-0300) America/Santiago'), ('America/Santo_Domingo', '(GMT-0400) America/Santo_Domingo'), ('America/Sao_Paulo', '(GMT-0300) America/Sao_Paulo'), ('America/Scoresbysund', '(GMT-0100) America/Scoresbysund'), ('America/Sitka', '(GMT-0900) America/Sitka'), ('America/St_Barthelemy', '(GMT-0400) America/St_Barthelemy'), ('America/St_Johns', '(GMT-0330) America/St_Johns'), ('America/St_Kitts', '(GMT-0400) America/St_Kitts'), ('America/St_Lucia', '(GMT-0400) America/St_Lucia'), ('America/St_Thomas', '(GMT-0400) America/St_Thomas'), ('America/St_Vincent', '(GMT-0400) America/St_Vincent'), ('America/Swift_Current', '(GMT-0600) America/Swift_Current'), ('America/Tegucigalpa', '(GMT-0600) America/Tegucigalpa'), ('America/Thule', '(GMT-0400) America/Thule'), ('America/Thunder_Bay', '(GMT-0500) America/Thunder_Bay'), ('America/Tijuana', '(GMT-0800) America/Tijuana'), ('America/Toronto', '(GMT-0500) America/Toronto'), ('America/Tortola', '(GMT-0400) America/Tortola'), ('America/Vancouver', '(GMT-0800) America/Vancouver'), ('America/Whitehorse', '(GMT-0700) America/Whitehorse'), ('America/Winnipeg', '(GMT-0600) America/Winnipeg'), ('America/Yakutat', '(GMT-0900) America/Yakutat'), ('America/Yellowknife', '(GMT-0700) America/Yellowknife'), ('Antarctica/Casey', '(GMT+1100) Antarctica/Casey'), ('Antarctica/Davis', '(GMT+0700) Antarctica/Davis'), ('Antarctica/DumontDUrville', '(GMT+1000) Antarctica/DumontDUrville'), ('Antarctica/Macquarie', '(GMT+1100) Antarctica/Macquarie'), ('Antarctica/Mawson', '(GMT+0500) Antarctica/Mawson'), ('Antarctica/McMurdo', '(GMT+1300) Antarctica/McMurdo'), ('Antarctica/Palmer', '(GMT-0300) Antarctica/Palmer'), ('Antarctica/Rothera', '(GMT-0300) Antarctica/Rothera'), ('Antarctica/Syowa', '(GMT+0300) Antarctica/Syowa'), ('Antarctica/Troll', '(GMT+0000) Antarctica/Troll'), ('Antarctica/Vostok', '(GMT+0600) Antarctica/Vostok'), ('Arctic/Longyearbyen', '(GMT+0100) Arctic/Longyearbyen'), ('Asia/Aden', '(GMT+0300) Asia/Aden'), ('Asia/Almaty', '(GMT+0600) Asia/Almaty'), ('Asia/Amman', '(GMT+0200) Asia/Amman'), ('Asia/Anadyr', '(GMT+1200) Asia/Anadyr'), ('Asia/Aqtau', '(GMT+0500) Asia/Aqtau'), ('Asia/Aqtobe', '(GMT+0500) Asia/Aqtobe'), ('Asia/Ashgabat', '(GMT+0500) Asia/Ashgabat'), ('Asia/Atyrau', '(GMT+0500) Asia/Atyrau'), ('Asia/Baghdad', '(GMT+0300) Asia/Baghdad'), ('Asia/Bahrain', '(GMT+0300) Asia/Bahrain'), ('Asia/Baku', '(GMT+0400) Asia/Baku'), ('Asia/Bangkok', '(GMT+0700) Asia/Bangkok'), ('Asia/Barnaul', '(GMT+0700) Asia/Barnaul'), ('Asia/Beirut', '(GMT+0200) Asia/Beirut'), ('Asia/Bishkek', '(GMT+0600) Asia/Bishkek'), ('Asia/Brunei', '(GMT+0800) Asia/Brunei'), ('Asia/Chita', '(GMT+0900) Asia/Chita'), ('Asia/Choibalsan', '(GMT+0800) Asia/Choibalsan'), ('Asia/Colombo', '(GMT+0530) Asia/Colombo'), ('Asia/Damascus', '(GMT+0200) Asia/Damascus'), ('Asia/Dhaka', '(GMT+0600) Asia/Dhaka'), ('Asia/Dili', '(GMT+0900) Asia/Dili'), ('Asia/Dubai', '(GMT+0400) Asia/Dubai'), ('Asia/Dushanbe', 
'(GMT+0500) Asia/Dushanbe'), ('Asia/Famagusta', '(GMT+0200) Asia/Famagusta'), ('Asia/Gaza', '(GMT+0200) Asia/Gaza'), ('Asia/Hebron', '(GMT+0200) Asia/Hebron'), ('Asia/Ho_Chi_Minh', '(GMT+0700) Asia/Ho_Chi_Minh'), ('Asia/Hong_Kong', '(GMT+0800) Asia/Hong_Kong'), ('Asia/Hovd', '(GMT+0700) Asia/Hovd'), ('Asia/Irkutsk', '(GMT+0800) Asia/Irkutsk'), ('Asia/Jakarta', '(GMT+0700) Asia/Jakarta'), ('Asia/Jayapura', '(GMT+0900) Asia/Jayapura'), ('Asia/Jerusalem', '(GMT+0200) Asia/Jerusalem'), ('Asia/Kabul', '(GMT+0430) Asia/Kabul'), ('Asia/Kamchatka', '(GMT+1200) Asia/Kamchatka'), ('Asia/Karachi', '(GMT+0500) Asia/Karachi'), ('Asia/Kathmandu', '(GMT+0545) Asia/Kathmandu'), ('Asia/Khandyga', '(GMT+0900) Asia/Khandyga'), ('Asia/Kolkata', '(GMT+0530) Asia/Kolkata'), ('Asia/Krasnoyarsk', '(GMT+0700) Asia/Krasnoyarsk'), ('Asia/Kuala_Lumpur', '(GMT+0800) Asia/Kuala_Lumpur'), ('Asia/Kuching', '(GMT+0800) Asia/Kuching'), ('Asia/Kuwait', '(GMT+0300) Asia/Kuwait'), ('Asia/Macau', '(GMT+0800) Asia/Macau'), ('Asia/Magadan', '(GMT+1100) Asia/Magadan'), ('Asia/Makassar', '(GMT+0800) Asia/Makassar'), ('Asia/Manila', '(GMT+0800) Asia/Manila'), ('Asia/Muscat', '(GMT+0400) Asia/Muscat'), ('Asia/Nicosia', '(GMT+0200) Asia/Nicosia'), ('Asia/Novokuznetsk', '(GMT+0700) Asia/Novokuznetsk'), ('Asia/Novosibirsk', '(GMT+0700) Asia/Novosibirsk'), ('Asia/Omsk', '(GMT+0600) Asia/Omsk'), ('Asia/Oral', '(GMT+0500) Asia/Oral'), ('Asia/Phnom_Penh', '(GMT+0700) Asia/Phnom_Penh'), ('Asia/Pontianak', '(GMT+0700) Asia/Pontianak'), ('Asia/Pyongyang', '(GMT+0900) Asia/Pyongyang'), ('Asia/Qatar', '(GMT+0300) Asia/Qatar'), ('Asia/Qostanay', '(GMT+0600) Asia/Qostanay'), ('Asia/Qyzylorda', '(GMT+0500) Asia/Qyzylorda'), ('Asia/Riyadh', '(GMT+0300) Asia/Riyadh'), ('Asia/Sakhalin', '(GMT+1100) Asia/Sakhalin'), ('Asia/Samarkand', '(GMT+0500) Asia/Samarkand'), ('Asia/Seoul', '(GMT+0900) Asia/Seoul'), ('Asia/Shanghai', '(GMT+0800) Asia/Shanghai'), ('Asia/Singapore', '(GMT+0800) Asia/Singapore'), ('Asia/Srednekolymsk', '(GMT+1100) Asia/Srednekolymsk'), ('Asia/Taipei', '(GMT+0800) Asia/Taipei'), ('Asia/Tashkent', '(GMT+0500) Asia/Tashkent'), ('Asia/Tbilisi', '(GMT+0400) Asia/Tbilisi'), ('Asia/Tehran', '(GMT+0330) Asia/Tehran'), ('Asia/Thimphu', '(GMT+0600) Asia/Thimphu'), ('Asia/Tokyo', '(GMT+0900) Asia/Tokyo'), ('Asia/Tomsk', '(GMT+0700) Asia/Tomsk'), ('Asia/Ulaanbaatar', '(GMT+0800) Asia/Ulaanbaatar'), ('Asia/Urumqi', '(GMT+0600) Asia/Urumqi'), ('Asia/Ust-Nera', '(GMT+1000) Asia/Ust-Nera'), ('Asia/Vientiane', '(GMT+0700) Asia/Vientiane'), ('Asia/Vladivostok', '(GMT+1000) Asia/Vladivostok'), ('Asia/Yakutsk', '(GMT+0900) Asia/Yakutsk'), ('Asia/Yangon', '(GMT+0630) Asia/Yangon'), ('Asia/Yekaterinburg', '(GMT+0500) Asia/Yekaterinburg'), ('Asia/Yerevan', '(GMT+0400) Asia/Yerevan'), ('Atlantic/Azores', '(GMT-0100) Atlantic/Azores'), ('Atlantic/Bermuda', '(GMT-0400) Atlantic/Bermuda'), ('Atlantic/Canary', '(GMT+0000) Atlantic/Canary'), ('Atlantic/Cape_Verde', '(GMT-0100) Atlantic/Cape_Verde'), ('Atlantic/Faroe', '(GMT+0000) Atlantic/Faroe'), ('Atlantic/Madeira', '(GMT+0000) Atlantic/Madeira'), ('Atlantic/Reykjavik', '(GMT+0000) Atlantic/Reykjavik'), ('Atlantic/South_Georgia', '(GMT-0200) Atlantic/South_Georgia'), ('Atlantic/St_Helena', '(GMT+0000) Atlantic/St_Helena'), ('Atlantic/Stanley', '(GMT-0300) Atlantic/Stanley'), ('Australia/Adelaide', '(GMT+1030) Australia/Adelaide'), ('Australia/Brisbane', '(GMT+1000) Australia/Brisbane'), ('Australia/Broken_Hill', '(GMT+1030) Australia/Broken_Hill'), ('Australia/Currie', '(GMT+1100) Australia/Currie'), 
('Australia/Darwin', '(GMT+0930) Australia/Darwin'), ('Australia/Eucla', '(GMT+0845) Australia/Eucla'), ('Australia/Hobart', '(GMT+1100) Australia/Hobart'), ('Australia/Lindeman', '(GMT+1000) Australia/Lindeman'), ('Australia/Lord_Howe', '(GMT+1100) Australia/Lord_Howe'), ('Australia/Melbourne', '(GMT+1100) Australia/Melbourne'), ('Australia/Perth', '(GMT+0800) Australia/Perth'), ('Australia/Sydney', '(GMT+1100) Australia/Sydney'), ('Canada/Atlantic', '(GMT-0400) Canada/Atlantic'), ('Canada/Central', '(GMT-0600) Canada/Central'), ('Canada/Eastern', '(GMT-0500) Canada/Eastern'), ('Canada/Mountain', '(GMT-0700) Canada/Mountain'), ('Canada/Newfoundland', '(GMT-0330) Canada/Newfoundland'), ('Canada/Pacific', '(GMT-0800) Canada/Pacific'), ('Europe/Amsterdam', '(GMT+0100) Europe/Amsterdam'), ('Europe/Andorra', '(GMT+0100) Europe/Andorra'), ('Europe/Astrakhan', '(GMT+0400) Europe/Astrakhan'), ('Europe/Athens', '(GMT+0200) Europe/Athens'), ('Europe/Belgrade', '(GMT+0100) Europe/Belgrade'), ('Europe/Berlin', '(GMT+0100) Europe/Berlin'), ('Europe/Bratislava', '(GMT+0100) Europe/Bratislava'), ('Europe/Brussels', '(GMT+0100) Europe/Brussels'), ('Europe/Bucharest', '(GMT+0200) Europe/Bucharest'), ('Europe/Budapest', '(GMT+0100) Europe/Budapest'), ('Europe/Busingen', '(GMT+0100) Europe/Busingen'), ('Europe/Chisinau', '(GMT+0200) Europe/Chisinau'), ('Europe/Copenhagen', '(GMT+0100) Europe/Copenhagen'), ('Europe/Dublin', '(GMT+0000) Europe/Dublin'), ('Europe/Gibraltar', '(GMT+0100) Europe/Gibraltar'), ('Europe/Guernsey', '(GMT+0000) Europe/Guernsey'), ('Europe/Helsinki', '(GMT+0200) Europe/Helsinki'), ('Europe/Isle_of_Man', '(GMT+0000) Europe/Isle_of_Man'), ('Europe/Istanbul', '(GMT+0300) Europe/Istanbul'), ('Europe/Jersey', '(GMT+0000) Europe/Jersey'), ('Europe/Kaliningrad', '(GMT+0200) Europe/Kaliningrad'), ('Europe/Kiev', '(GMT+0200) Europe/Kiev'), ('Europe/Kirov', '(GMT+0300) Europe/Kirov'), ('Europe/Lisbon', '(GMT+0000) Europe/Lisbon'), ('Europe/Ljubljana', '(GMT+0100) Europe/Ljubljana'), ('Europe/London', '(GMT+0000) Europe/London'), ('Europe/Luxembourg', '(GMT+0100) Europe/Luxembourg'), ('Europe/Madrid', '(GMT+0100) Europe/Madrid'), ('Europe/Malta', '(GMT+0100) Europe/Malta'), ('Europe/Mariehamn', '(GMT+0200) Europe/Mariehamn'), ('Europe/Minsk', '(GMT+0300) Europe/Minsk'), ('Europe/Monaco', '(GMT+0100) Europe/Monaco'), ('Europe/Moscow', '(GMT+0300) Europe/Moscow'), ('Europe/Oslo', '(GMT+0100) Europe/Oslo'), ('Europe/Paris', '(GMT+0100) Europe/Paris'), ('Europe/Podgorica', '(GMT+0100) Europe/Podgorica'), ('Europe/Prague', '(GMT+0100) Europe/Prague'), ('Europe/Riga', '(GMT+0200) Europe/Riga'), ('Europe/Rome', '(GMT+0100) Europe/Rome'), ('Europe/Samara', '(GMT+0400) Europe/Samara'), ('Europe/San_Marino', '(GMT+0100) Europe/San_Marino'), ('Europe/Sarajevo', '(GMT+0100) Europe/Sarajevo'), ('Europe/Saratov', '(GMT+0400) Europe/Saratov'), ('Europe/Simferopol', '(GMT+0300) Europe/Simferopol'), ('Europe/Skopje', '(GMT+0100) Europe/Skopje'), ('Europe/Sofia', '(GMT+0200) Europe/Sofia'), ('Europe/Stockholm', '(GMT+0100) Europe/Stockholm'), ('Europe/Tallinn', '(GMT+0200) Europe/Tallinn'), ('Europe/Tirane', '(GMT+0100) Europe/Tirane'), ('Europe/Ulyanovsk', '(GMT+0400) Europe/Ulyanovsk'), ('Europe/Uzhgorod', '(GMT+0200) Europe/Uzhgorod'), ('Europe/Vaduz', '(GMT+0100) Europe/Vaduz'), ('Europe/Vatican', '(GMT+0100) Europe/Vatican'), ('Europe/Vienna', '(GMT+0100) Europe/Vienna'), ('Europe/Vilnius', '(GMT+0200) Europe/Vilnius'), ('Europe/Volgograd', '(GMT+0400) Europe/Volgograd'), ('Europe/Warsaw', '(GMT+0100) 
Europe/Warsaw'), ('Europe/Zagreb', '(GMT+0100) Europe/Zagreb'), ('Europe/Zaporozhye', '(GMT+0200) Europe/Zaporozhye'), ('Europe/Zurich', '(GMT+0100) Europe/Zurich'), ('GMT', '(GMT+0000) GMT'), ('Indian/Antananarivo', '(GMT+0300) Indian/Antananarivo'), ('Indian/Chagos', '(GMT+0600) Indian/Chagos'), ('Indian/Christmas', '(GMT+0700) Indian/Christmas'), ('Indian/Cocos', '(GMT+0630) Indian/Cocos'), ('Indian/Comoro', '(GMT+0300) Indian/Comoro'), ('Indian/Kerguelen', '(GMT+0500) Indian/Kerguelen'), ('Indian/Mahe', '(GMT+0400) Indian/Mahe'), ('Indian/Maldives', '(GMT+0500) Indian/Maldives'), ('Indian/Mauritius', '(GMT+0400) Indian/Mauritius'), ('Indian/Mayotte', '(GMT+0300) Indian/Mayotte'), ('Indian/Reunion', '(GMT+0400) Indian/Reunion'), ('Pacific/Apia', '(GMT+1400) Pacific/Apia'), ('Pacific/Auckland', '(GMT+1300) Pacific/Auckland'), ('Pacific/Bougainville', '(GMT+1100) Pacific/Bougainville'), ('Pacific/Chatham', '(GMT+1345) Pacific/Chatham'), ('Pacific/Chuuk', '(GMT+1000) Pacific/Chuuk'), ('Pacific/Easter', '(GMT-0500) Pacific/Easter'), ('Pacific/Efate', '(GMT+1100) Pacific/Efate'), ('Pacific/Enderbury', '(GMT+1300) Pacific/Enderbury'), ('Pacific/Fakaofo', '(GMT+1300) Pacific/Fakaofo'), ('Pacific/Fiji', '(GMT+1200) Pacific/Fiji'), ('Pacific/Funafuti', '(GMT+1200) Pacific/Funafuti'), ('Pacific/Galapagos', '(GMT-0600) Pacific/Galapagos'), ('Pacific/Gambier', '(GMT-0900) Pacific/Gambier'), ('Pacific/Guadalcanal', '(GMT+1100) Pacific/Guadalcanal'), ('Pacific/Guam', '(GMT+1000) Pacific/Guam'), ('Pacific/Honolulu', '(GMT-1000) Pacific/Honolulu'), ('Pacific/Kiritimati', '(GMT+1400) Pacific/Kiritimati'), ('Pacific/Kosrae', '(GMT+1100) Pacific/Kosrae'), ('Pacific/Kwajalein', '(GMT+1200) Pacific/Kwajalein'), ('Pacific/Majuro', '(GMT+1200) Pacific/Majuro'), ('Pacific/Marquesas', '(GMT-0930) Pacific/Marquesas'), ('Pacific/Midway', '(GMT-1100) Pacific/Midway'), ('Pacific/Nauru', '(GMT+1200) Pacific/Nauru'), ('Pacific/Niue', '(GMT-1100) Pacific/Niue'), ('Pacific/Norfolk', '(GMT+1200) Pacific/Norfolk'), ('Pacific/Noumea', '(GMT+1100) Pacific/Noumea'), ('Pacific/Pago_Pago', '(GMT-1100) Pacific/Pago_Pago'), ('Pacific/Palau', '(GMT+0900) Pacific/Palau'), ('Pacific/Pitcairn', '(GMT-0800) Pacific/Pitcairn'), ('Pacific/Pohnpei', '(GMT+1100) Pacific/Pohnpei'), ('Pacific/Port_Moresby', '(GMT+1000) Pacific/Port_Moresby'), ('Pacific/Rarotonga', '(GMT-1000) Pacific/Rarotonga'), ('Pacific/Saipan', '(GMT+1000) Pacific/Saipan'), ('Pacific/Tahiti', '(GMT-1000) Pacific/Tahiti'), ('Pacific/Tarawa', '(GMT+1200) Pacific/Tarawa'), ('Pacific/Tongatapu', '(GMT+1300) Pacific/Tongatapu'), ('Pacific/Wake', '(GMT+1200) Pacific/Wake'), ('Pacific/Wallis', '(GMT+1200) Pacific/Wallis'), ('US/Alaska', '(GMT-0900) US/Alaska'), ('US/Arizona', '(GMT-0700) US/Arizona'), ('US/Central', '(GMT-0600) US/Central'), ('US/Eastern', '(GMT-0500) US/Eastern'), ('US/Hawaii', '(GMT-1000) US/Hawaii'), ('US/Mountain', '(GMT-0700) US/Mountain'), ('US/Pacific', '(GMT-0800) US/Pacific'), ('UTC', '(GMT+0000) UTC')], default='America/New_York', max_length=100),
+            model_name="profile",
+            name="timezone",
+            field=vendor.timezones.fields.TimeZoneField(
+                choices=[
+                    ("Africa/Abidjan", "(GMT+0000) Africa/Abidjan"),
+                    ("Africa/Accra", "(GMT+0000) Africa/Accra"),
+                    ("Africa/Addis_Ababa", "(GMT+0300) Africa/Addis_Ababa"),
+                    ("Africa/Algiers", "(GMT+0100) Africa/Algiers"),
+                    ("Africa/Asmara", "(GMT+0300) Africa/Asmara"),
+                    ("Africa/Bamako", "(GMT+0000) Africa/Bamako"),
+                    ("Africa/Bangui", "(GMT+0100) Africa/Bangui"),
+                    ("Africa/Banjul", "(GMT+0000) Africa/Banjul"),
+                    ("Africa/Bissau", "(GMT+0000) Africa/Bissau"),
+                    ("Africa/Blantyre", "(GMT+0200) Africa/Blantyre"),
+                    ("Africa/Brazzaville", "(GMT+0100) Africa/Brazzaville"),
+                    ("Africa/Bujumbura", "(GMT+0200) Africa/Bujumbura"),
+                    ("Africa/Cairo", "(GMT+0200) Africa/Cairo"),
+                    ("Africa/Casablanca", "(GMT+0100) Africa/Casablanca"),
+                    ("Africa/Ceuta", "(GMT+0100) Africa/Ceuta"),
+                    ("Africa/Conakry", "(GMT+0000) Africa/Conakry"),
+                    ("Africa/Dakar", "(GMT+0000) Africa/Dakar"),
+                    ("Africa/Dar_es_Salaam", "(GMT+0300) Africa/Dar_es_Salaam"),
+                    ("Africa/Djibouti", "(GMT+0300) Africa/Djibouti"),
+                    ("Africa/Douala", "(GMT+0100) Africa/Douala"),
+                    ("Africa/El_Aaiun", "(GMT+0100) Africa/El_Aaiun"),
+                    ("Africa/Freetown", "(GMT+0000) Africa/Freetown"),
+                    ("Africa/Gaborone", "(GMT+0200) Africa/Gaborone"),
+                    ("Africa/Harare", "(GMT+0200) Africa/Harare"),
+                    ("Africa/Johannesburg", "(GMT+0200) Africa/Johannesburg"),
+                    ("Africa/Juba", "(GMT+0300) Africa/Juba"),
+                    ("Africa/Kampala", "(GMT+0300) Africa/Kampala"),
+                    ("Africa/Khartoum", "(GMT+0200) Africa/Khartoum"),
+                    ("Africa/Kigali", "(GMT+0200) Africa/Kigali"),
+                    ("Africa/Kinshasa", "(GMT+0100) Africa/Kinshasa"),
+                    ("Africa/Lagos", "(GMT+0100) Africa/Lagos"),
+                    ("Africa/Libreville", "(GMT+0100) Africa/Libreville"),
+                    ("Africa/Lome", "(GMT+0000) Africa/Lome"),
+                    ("Africa/Luanda", "(GMT+0100) Africa/Luanda"),
+                    ("Africa/Lubumbashi", "(GMT+0200) Africa/Lubumbashi"),
+                    ("Africa/Lusaka", "(GMT+0200) Africa/Lusaka"),
+                    ("Africa/Malabo", "(GMT+0100) Africa/Malabo"),
+                    ("Africa/Maputo", "(GMT+0200) Africa/Maputo"),
+                    ("Africa/Maseru", "(GMT+0200) Africa/Maseru"),
+                    ("Africa/Mbabane", "(GMT+0200) Africa/Mbabane"),
+                    ("Africa/Mogadishu", "(GMT+0300) Africa/Mogadishu"),
+                    ("Africa/Monrovia", "(GMT+0000) Africa/Monrovia"),
+                    ("Africa/Nairobi", "(GMT+0300) Africa/Nairobi"),
+                    ("Africa/Ndjamena", "(GMT+0100) Africa/Ndjamena"),
+                    ("Africa/Niamey", "(GMT+0100) Africa/Niamey"),
+                    ("Africa/Nouakchott", "(GMT+0000) Africa/Nouakchott"),
+                    ("Africa/Ouagadougou", "(GMT+0000) Africa/Ouagadougou"),
+                    ("Africa/Porto-Novo", "(GMT+0100) Africa/Porto-Novo"),
+                    ("Africa/Sao_Tome", "(GMT+0000) Africa/Sao_Tome"),
+                    ("Africa/Tripoli", "(GMT+0200) Africa/Tripoli"),
+                    ("Africa/Tunis", "(GMT+0100) Africa/Tunis"),
+                    ("Africa/Windhoek", "(GMT+0200) Africa/Windhoek"),
+                    ("America/Adak", "(GMT-1000) America/Adak"),
+                    ("America/Anchorage", "(GMT-0900) America/Anchorage"),
+                    ("America/Anguilla", "(GMT-0400) America/Anguilla"),
+                    ("America/Antigua", "(GMT-0400) America/Antigua"),
+                    ("America/Araguaina", "(GMT-0300) America/Araguaina"),
+                    ("America/Argentina/Buenos_Aires", "(GMT-0300) America/Argentina/Buenos_Aires"),
+                    ("America/Argentina/Catamarca", "(GMT-0300) America/Argentina/Catamarca"),
+                    ("America/Argentina/Cordoba", "(GMT-0300) America/Argentina/Cordoba"),
+                    ("America/Argentina/Jujuy", "(GMT-0300) America/Argentina/Jujuy"),
+                    ("America/Argentina/La_Rioja", "(GMT-0300) America/Argentina/La_Rioja"),
+                    ("America/Argentina/Mendoza", "(GMT-0300) America/Argentina/Mendoza"),
+                    ("America/Argentina/Rio_Gallegos", "(GMT-0300) America/Argentina/Rio_Gallegos"),
+                    ("America/Argentina/Salta", "(GMT-0300) America/Argentina/Salta"),
+                    ("America/Argentina/San_Juan", "(GMT-0300) America/Argentina/San_Juan"),
+                    ("America/Argentina/San_Luis", "(GMT-0300) America/Argentina/San_Luis"),
+                    ("America/Argentina/Tucuman", "(GMT-0300) America/Argentina/Tucuman"),
+                    ("America/Argentina/Ushuaia", "(GMT-0300) America/Argentina/Ushuaia"),
+                    ("America/Aruba", "(GMT-0400) America/Aruba"),
+                    ("America/Asuncion", "(GMT-0300) America/Asuncion"),
+                    ("America/Atikokan", "(GMT-0500) America/Atikokan"),
+                    ("America/Bahia", "(GMT-0300) America/Bahia"),
+                    ("America/Bahia_Banderas", "(GMT-0600) America/Bahia_Banderas"),
+                    ("America/Barbados", "(GMT-0400) America/Barbados"),
+                    ("America/Belem", "(GMT-0300) America/Belem"),
+                    ("America/Belize", "(GMT-0600) America/Belize"),
+                    ("America/Blanc-Sablon", "(GMT-0400) America/Blanc-Sablon"),
+                    ("America/Boa_Vista", "(GMT-0400) America/Boa_Vista"),
+                    ("America/Bogota", "(GMT-0500) America/Bogota"),
+                    ("America/Boise", "(GMT-0700) America/Boise"),
+                    ("America/Cambridge_Bay", "(GMT-0700) America/Cambridge_Bay"),
+                    ("America/Campo_Grande", "(GMT-0400) America/Campo_Grande"),
+                    ("America/Cancun", "(GMT-0500) America/Cancun"),
+                    ("America/Caracas", "(GMT-0400) America/Caracas"),
+                    ("America/Cayenne", "(GMT-0300) America/Cayenne"),
+                    ("America/Cayman", "(GMT-0500) America/Cayman"),
+                    ("America/Chicago", "(GMT-0600) America/Chicago"),
+                    ("America/Chihuahua", "(GMT-0700) America/Chihuahua"),
+                    ("America/Costa_Rica", "(GMT-0600) America/Costa_Rica"),
+                    ("America/Creston", "(GMT-0700) America/Creston"),
+                    ("America/Cuiaba", "(GMT-0400) America/Cuiaba"),
+                    ("America/Curacao", "(GMT-0400) America/Curacao"),
+                    ("America/Danmarkshavn", "(GMT+0000) America/Danmarkshavn"),
+                    ("America/Dawson", "(GMT-0700) America/Dawson"),
+                    ("America/Dawson_Creek", "(GMT-0700) America/Dawson_Creek"),
+                    ("America/Denver", "(GMT-0700) America/Denver"),
+                    ("America/Detroit", "(GMT-0500) America/Detroit"),
+                    ("America/Dominica", "(GMT-0400) America/Dominica"),
+                    ("America/Edmonton", "(GMT-0700) America/Edmonton"),
+                    ("America/Eirunepe", "(GMT-0500) America/Eirunepe"),
+                    ("America/El_Salvador", "(GMT-0600) America/El_Salvador"),
+                    ("America/Fort_Nelson", "(GMT-0700) America/Fort_Nelson"),
+                    ("America/Fortaleza", "(GMT-0300) America/Fortaleza"),
+                    ("America/Glace_Bay", "(GMT-0400) America/Glace_Bay"),
+                    ("America/Goose_Bay", "(GMT-0400) America/Goose_Bay"),
+                    ("America/Grand_Turk", "(GMT-0500) America/Grand_Turk"),
+                    ("America/Grenada", "(GMT-0400) America/Grenada"),
+                    ("America/Guadeloupe", "(GMT-0400) America/Guadeloupe"),
+                    ("America/Guatemala", "(GMT-0600) America/Guatemala"),
+                    ("America/Guayaquil", "(GMT-0500) America/Guayaquil"),
+                    ("America/Guyana", "(GMT-0400) America/Guyana"),
+                    ("America/Halifax", "(GMT-0400) America/Halifax"),
+                    ("America/Havana", "(GMT-0500) America/Havana"),
+                    ("America/Hermosillo", "(GMT-0700) America/Hermosillo"),
+                    ("America/Indiana/Indianapolis", "(GMT-0500) America/Indiana/Indianapolis"),
+                    ("America/Indiana/Knox", "(GMT-0600) America/Indiana/Knox"),
+                    ("America/Indiana/Marengo", "(GMT-0500) America/Indiana/Marengo"),
+                    ("America/Indiana/Petersburg", "(GMT-0500) America/Indiana/Petersburg"),
+                    ("America/Indiana/Tell_City", "(GMT-0600) America/Indiana/Tell_City"),
+                    ("America/Indiana/Vevay", "(GMT-0500) America/Indiana/Vevay"),
+                    ("America/Indiana/Vincennes", "(GMT-0500) America/Indiana/Vincennes"),
+                    ("America/Indiana/Winamac", "(GMT-0500) America/Indiana/Winamac"),
+                    ("America/Inuvik", "(GMT-0700) America/Inuvik"),
+                    ("America/Iqaluit", "(GMT-0500) America/Iqaluit"),
+                    ("America/Jamaica", "(GMT-0500) America/Jamaica"),
+                    ("America/Juneau", "(GMT-0900) America/Juneau"),
+                    ("America/Kentucky/Louisville", "(GMT-0500) America/Kentucky/Louisville"),
+                    ("America/Kentucky/Monticello", "(GMT-0500) America/Kentucky/Monticello"),
+                    ("America/Kralendijk", "(GMT-0400) America/Kralendijk"),
+                    ("America/La_Paz", "(GMT-0400) America/La_Paz"),
+                    ("America/Lima", "(GMT-0500) America/Lima"),
+                    ("America/Los_Angeles", "(GMT-0800) America/Los_Angeles"),
+                    ("America/Lower_Princes", "(GMT-0400) America/Lower_Princes"),
+                    ("America/Maceio", "(GMT-0300) America/Maceio"),
+                    ("America/Managua", "(GMT-0600) America/Managua"),
+                    ("America/Manaus", "(GMT-0400) America/Manaus"),
+                    ("America/Marigot", "(GMT-0400) America/Marigot"),
+                    ("America/Martinique", "(GMT-0400) America/Martinique"),
+                    ("America/Matamoros", "(GMT-0600) America/Matamoros"),
+                    ("America/Mazatlan", "(GMT-0700) America/Mazatlan"),
+                    ("America/Menominee", "(GMT-0600) America/Menominee"),
+                    ("America/Merida", "(GMT-0600) America/Merida"),
+                    ("America/Metlakatla", "(GMT-0900) America/Metlakatla"),
+                    ("America/Mexico_City", "(GMT-0600) America/Mexico_City"),
+                    ("America/Miquelon", "(GMT-0300) America/Miquelon"),
+                    ("America/Moncton", "(GMT-0400) America/Moncton"),
+                    ("America/Monterrey", "(GMT-0600) America/Monterrey"),
+                    ("America/Montevideo", "(GMT-0300) America/Montevideo"),
+                    ("America/Montserrat", "(GMT-0400) America/Montserrat"),
+                    ("America/Nassau", "(GMT-0500) America/Nassau"),
+                    ("America/New_York", "(GMT-0500) America/New_York"),
+                    ("America/Nipigon", "(GMT-0500) America/Nipigon"),
+                    ("America/Nome", "(GMT-0900) America/Nome"),
+                    ("America/Noronha", "(GMT-0200) America/Noronha"),
+                    ("America/North_Dakota/Beulah", "(GMT-0600) America/North_Dakota/Beulah"),
+                    ("America/North_Dakota/Center", "(GMT-0600) America/North_Dakota/Center"),
+                    ("America/North_Dakota/New_Salem", "(GMT-0600) America/North_Dakota/New_Salem"),
+                    ("America/Nuuk", "(GMT-0300) America/Nuuk"),
+                    ("America/Ojinaga", "(GMT-0700) America/Ojinaga"),
+                    ("America/Panama", "(GMT-0500) America/Panama"),
+                    ("America/Pangnirtung", "(GMT-0500) America/Pangnirtung"),
+                    ("America/Paramaribo", "(GMT-0300) America/Paramaribo"),
+                    ("America/Phoenix", "(GMT-0700) America/Phoenix"),
+                    ("America/Port-au-Prince", "(GMT-0500) America/Port-au-Prince"),
+                    ("America/Port_of_Spain", "(GMT-0400) America/Port_of_Spain"),
+                    ("America/Porto_Velho", "(GMT-0400) America/Porto_Velho"),
+                    ("America/Puerto_Rico", "(GMT-0400) America/Puerto_Rico"),
+                    ("America/Punta_Arenas", "(GMT-0300) America/Punta_Arenas"),
+                    ("America/Rainy_River", "(GMT-0600) America/Rainy_River"),
+                    ("America/Rankin_Inlet", "(GMT-0600) America/Rankin_Inlet"),
+                    ("America/Recife", "(GMT-0300) America/Recife"),
+                    ("America/Regina", "(GMT-0600) America/Regina"),
+                    ("America/Resolute", "(GMT-0600) America/Resolute"),
+                    ("America/Rio_Branco", "(GMT-0500) America/Rio_Branco"),
+                    ("America/Santarem", "(GMT-0300) America/Santarem"),
+                    ("America/Santiago", "(GMT-0300) America/Santiago"),
+                    ("America/Santo_Domingo", "(GMT-0400) America/Santo_Domingo"),
+                    ("America/Sao_Paulo", "(GMT-0300) America/Sao_Paulo"),
+                    ("America/Scoresbysund", "(GMT-0100) America/Scoresbysund"),
+                    ("America/Sitka", "(GMT-0900) America/Sitka"),
+                    ("America/St_Barthelemy", "(GMT-0400) America/St_Barthelemy"),
+                    ("America/St_Johns", "(GMT-0330) America/St_Johns"),
+                    ("America/St_Kitts", "(GMT-0400) America/St_Kitts"),
+                    ("America/St_Lucia", "(GMT-0400) America/St_Lucia"),
+                    ("America/St_Thomas", "(GMT-0400) America/St_Thomas"),
+                    ("America/St_Vincent", "(GMT-0400) America/St_Vincent"),
+                    ("America/Swift_Current", "(GMT-0600) America/Swift_Current"),
+                    ("America/Tegucigalpa", "(GMT-0600) America/Tegucigalpa"),
+                    ("America/Thule", "(GMT-0400) America/Thule"),
+                    ("America/Thunder_Bay", "(GMT-0500) America/Thunder_Bay"),
+                    ("America/Tijuana", "(GMT-0800) America/Tijuana"),
+                    ("America/Toronto", "(GMT-0500) America/Toronto"),
+                    ("America/Tortola", "(GMT-0400) America/Tortola"),
+                    ("America/Vancouver", "(GMT-0800) America/Vancouver"),
+                    ("America/Whitehorse", "(GMT-0700) America/Whitehorse"),
+                    ("America/Winnipeg", "(GMT-0600) America/Winnipeg"),
+                    ("America/Yakutat", "(GMT-0900) America/Yakutat"),
+                    ("America/Yellowknife", "(GMT-0700) America/Yellowknife"),
+                    ("Antarctica/Casey", "(GMT+1100) Antarctica/Casey"),
+                    ("Antarctica/Davis", "(GMT+0700) Antarctica/Davis"),
+                    ("Antarctica/DumontDUrville", "(GMT+1000) Antarctica/DumontDUrville"),
+                    ("Antarctica/Macquarie", "(GMT+1100) Antarctica/Macquarie"),
+                    ("Antarctica/Mawson", "(GMT+0500) Antarctica/Mawson"),
+                    ("Antarctica/McMurdo", "(GMT+1300) Antarctica/McMurdo"),
+                    ("Antarctica/Palmer", "(GMT-0300) Antarctica/Palmer"),
+                    ("Antarctica/Rothera", "(GMT-0300) Antarctica/Rothera"),
+                    ("Antarctica/Syowa", "(GMT+0300) Antarctica/Syowa"),
+                    ("Antarctica/Troll", "(GMT+0000) Antarctica/Troll"),
+                    ("Antarctica/Vostok", "(GMT+0600) Antarctica/Vostok"),
+                    ("Arctic/Longyearbyen", "(GMT+0100) Arctic/Longyearbyen"),
+                    ("Asia/Aden", "(GMT+0300) Asia/Aden"),
+                    ("Asia/Almaty", "(GMT+0600) Asia/Almaty"),
+                    ("Asia/Amman", "(GMT+0200) Asia/Amman"),
+                    ("Asia/Anadyr", "(GMT+1200) Asia/Anadyr"),
+                    ("Asia/Aqtau", "(GMT+0500) Asia/Aqtau"),
+                    ("Asia/Aqtobe", "(GMT+0500) Asia/Aqtobe"),
+                    ("Asia/Ashgabat", "(GMT+0500) Asia/Ashgabat"),
+                    ("Asia/Atyrau", "(GMT+0500) Asia/Atyrau"),
+                    ("Asia/Baghdad", "(GMT+0300) Asia/Baghdad"),
+                    ("Asia/Bahrain", "(GMT+0300) Asia/Bahrain"),
+                    ("Asia/Baku", "(GMT+0400) Asia/Baku"),
+                    ("Asia/Bangkok", "(GMT+0700) Asia/Bangkok"),
+                    ("Asia/Barnaul", "(GMT+0700) Asia/Barnaul"),
+                    ("Asia/Beirut", "(GMT+0200) Asia/Beirut"),
+                    ("Asia/Bishkek", "(GMT+0600) Asia/Bishkek"),
+                    ("Asia/Brunei", "(GMT+0800) Asia/Brunei"),
+                    ("Asia/Chita", "(GMT+0900) Asia/Chita"),
+                    ("Asia/Choibalsan", "(GMT+0800) Asia/Choibalsan"),
+                    ("Asia/Colombo", "(GMT+0530) Asia/Colombo"),
+                    ("Asia/Damascus", "(GMT+0200) Asia/Damascus"),
+                    ("Asia/Dhaka", "(GMT+0600) Asia/Dhaka"),
+                    ("Asia/Dili", "(GMT+0900) Asia/Dili"),
+                    ("Asia/Dubai", "(GMT+0400) Asia/Dubai"),
+                    ("Asia/Dushanbe", "(GMT+0500) Asia/Dushanbe"),
+                    ("Asia/Famagusta", "(GMT+0200) Asia/Famagusta"),
+                    ("Asia/Gaza", "(GMT+0200) Asia/Gaza"),
+                    ("Asia/Hebron", "(GMT+0200) Asia/Hebron"),
+                    ("Asia/Ho_Chi_Minh", "(GMT+0700) Asia/Ho_Chi_Minh"),
+                    ("Asia/Hong_Kong", "(GMT+0800) Asia/Hong_Kong"),
+                    ("Asia/Hovd", "(GMT+0700) Asia/Hovd"),
+                    ("Asia/Irkutsk", "(GMT+0800) Asia/Irkutsk"),
+                    ("Asia/Jakarta", "(GMT+0700) Asia/Jakarta"),
+                    ("Asia/Jayapura", "(GMT+0900) Asia/Jayapura"),
+                    ("Asia/Jerusalem", "(GMT+0200) Asia/Jerusalem"),
+                    ("Asia/Kabul", "(GMT+0430) Asia/Kabul"),
+                    ("Asia/Kamchatka", "(GMT+1200) Asia/Kamchatka"),
+                    ("Asia/Karachi", "(GMT+0500) Asia/Karachi"),
+                    ("Asia/Kathmandu", "(GMT+0545) Asia/Kathmandu"),
+                    ("Asia/Khandyga", "(GMT+0900) Asia/Khandyga"),
+                    ("Asia/Kolkata", "(GMT+0530) Asia/Kolkata"),
+                    ("Asia/Krasnoyarsk", "(GMT+0700) Asia/Krasnoyarsk"),
+                    ("Asia/Kuala_Lumpur", "(GMT+0800) Asia/Kuala_Lumpur"),
+                    ("Asia/Kuching", "(GMT+0800) Asia/Kuching"),
+                    ("Asia/Kuwait", "(GMT+0300) Asia/Kuwait"),
+                    ("Asia/Macau", "(GMT+0800) Asia/Macau"),
+                    ("Asia/Magadan", "(GMT+1100) Asia/Magadan"),
+                    ("Asia/Makassar", "(GMT+0800) Asia/Makassar"),
+                    ("Asia/Manila", "(GMT+0800) Asia/Manila"),
+                    ("Asia/Muscat", "(GMT+0400) Asia/Muscat"),
+                    ("Asia/Nicosia", "(GMT+0200) Asia/Nicosia"),
+                    ("Asia/Novokuznetsk", "(GMT+0700) Asia/Novokuznetsk"),
+                    ("Asia/Novosibirsk", "(GMT+0700) Asia/Novosibirsk"),
+                    ("Asia/Omsk", "(GMT+0600) Asia/Omsk"),
+                    ("Asia/Oral", "(GMT+0500) Asia/Oral"),
+
("Asia/Phnom_Penh", "(GMT+0700) Asia/Phnom_Penh"), + ("Asia/Pontianak", "(GMT+0700) Asia/Pontianak"), + ("Asia/Pyongyang", "(GMT+0900) Asia/Pyongyang"), + ("Asia/Qatar", "(GMT+0300) Asia/Qatar"), + ("Asia/Qostanay", "(GMT+0600) Asia/Qostanay"), + ("Asia/Qyzylorda", "(GMT+0500) Asia/Qyzylorda"), + ("Asia/Riyadh", "(GMT+0300) Asia/Riyadh"), + ("Asia/Sakhalin", "(GMT+1100) Asia/Sakhalin"), + ("Asia/Samarkand", "(GMT+0500) Asia/Samarkand"), + ("Asia/Seoul", "(GMT+0900) Asia/Seoul"), + ("Asia/Shanghai", "(GMT+0800) Asia/Shanghai"), + ("Asia/Singapore", "(GMT+0800) Asia/Singapore"), + ("Asia/Srednekolymsk", "(GMT+1100) Asia/Srednekolymsk"), + ("Asia/Taipei", "(GMT+0800) Asia/Taipei"), + ("Asia/Tashkent", "(GMT+0500) Asia/Tashkent"), + ("Asia/Tbilisi", "(GMT+0400) Asia/Tbilisi"), + ("Asia/Tehran", "(GMT+0330) Asia/Tehran"), + ("Asia/Thimphu", "(GMT+0600) Asia/Thimphu"), + ("Asia/Tokyo", "(GMT+0900) Asia/Tokyo"), + ("Asia/Tomsk", "(GMT+0700) Asia/Tomsk"), + ("Asia/Ulaanbaatar", "(GMT+0800) Asia/Ulaanbaatar"), + ("Asia/Urumqi", "(GMT+0600) Asia/Urumqi"), + ("Asia/Ust-Nera", "(GMT+1000) Asia/Ust-Nera"), + ("Asia/Vientiane", "(GMT+0700) Asia/Vientiane"), + ("Asia/Vladivostok", "(GMT+1000) Asia/Vladivostok"), + ("Asia/Yakutsk", "(GMT+0900) Asia/Yakutsk"), + ("Asia/Yangon", "(GMT+0630) Asia/Yangon"), + ("Asia/Yekaterinburg", "(GMT+0500) Asia/Yekaterinburg"), + ("Asia/Yerevan", "(GMT+0400) Asia/Yerevan"), + ("Atlantic/Azores", "(GMT-0100) Atlantic/Azores"), + ("Atlantic/Bermuda", "(GMT-0400) Atlantic/Bermuda"), + ("Atlantic/Canary", "(GMT+0000) Atlantic/Canary"), + ("Atlantic/Cape_Verde", "(GMT-0100) Atlantic/Cape_Verde"), + ("Atlantic/Faroe", "(GMT+0000) Atlantic/Faroe"), + ("Atlantic/Madeira", "(GMT+0000) Atlantic/Madeira"), + ("Atlantic/Reykjavik", "(GMT+0000) Atlantic/Reykjavik"), + ("Atlantic/South_Georgia", "(GMT-0200) Atlantic/South_Georgia"), + ("Atlantic/St_Helena", "(GMT+0000) Atlantic/St_Helena"), + ("Atlantic/Stanley", "(GMT-0300) Atlantic/Stanley"), + ("Australia/Adelaide", "(GMT+1030) Australia/Adelaide"), + ("Australia/Brisbane", "(GMT+1000) Australia/Brisbane"), + ("Australia/Broken_Hill", "(GMT+1030) Australia/Broken_Hill"), + ("Australia/Currie", "(GMT+1100) Australia/Currie"), + ("Australia/Darwin", "(GMT+0930) Australia/Darwin"), + ("Australia/Eucla", "(GMT+0845) Australia/Eucla"), + ("Australia/Hobart", "(GMT+1100) Australia/Hobart"), + ("Australia/Lindeman", "(GMT+1000) Australia/Lindeman"), + ("Australia/Lord_Howe", "(GMT+1100) Australia/Lord_Howe"), + ("Australia/Melbourne", "(GMT+1100) Australia/Melbourne"), + ("Australia/Perth", "(GMT+0800) Australia/Perth"), + ("Australia/Sydney", "(GMT+1100) Australia/Sydney"), + ("Canada/Atlantic", "(GMT-0400) Canada/Atlantic"), + ("Canada/Central", "(GMT-0600) Canada/Central"), + ("Canada/Eastern", "(GMT-0500) Canada/Eastern"), + ("Canada/Mountain", "(GMT-0700) Canada/Mountain"), + ("Canada/Newfoundland", "(GMT-0330) Canada/Newfoundland"), + ("Canada/Pacific", "(GMT-0800) Canada/Pacific"), + ("Europe/Amsterdam", "(GMT+0100) Europe/Amsterdam"), + ("Europe/Andorra", "(GMT+0100) Europe/Andorra"), + ("Europe/Astrakhan", "(GMT+0400) Europe/Astrakhan"), + ("Europe/Athens", "(GMT+0200) Europe/Athens"), + ("Europe/Belgrade", "(GMT+0100) Europe/Belgrade"), + ("Europe/Berlin", "(GMT+0100) Europe/Berlin"), + ("Europe/Bratislava", "(GMT+0100) Europe/Bratislava"), + ("Europe/Brussels", "(GMT+0100) Europe/Brussels"), + ("Europe/Bucharest", "(GMT+0200) Europe/Bucharest"), + ("Europe/Budapest", "(GMT+0100) Europe/Budapest"), + ("Europe/Busingen", 
"(GMT+0100) Europe/Busingen"), + ("Europe/Chisinau", "(GMT+0200) Europe/Chisinau"), + ("Europe/Copenhagen", "(GMT+0100) Europe/Copenhagen"), + ("Europe/Dublin", "(GMT+0000) Europe/Dublin"), + ("Europe/Gibraltar", "(GMT+0100) Europe/Gibraltar"), + ("Europe/Guernsey", "(GMT+0000) Europe/Guernsey"), + ("Europe/Helsinki", "(GMT+0200) Europe/Helsinki"), + ("Europe/Isle_of_Man", "(GMT+0000) Europe/Isle_of_Man"), + ("Europe/Istanbul", "(GMT+0300) Europe/Istanbul"), + ("Europe/Jersey", "(GMT+0000) Europe/Jersey"), + ("Europe/Kaliningrad", "(GMT+0200) Europe/Kaliningrad"), + ("Europe/Kiev", "(GMT+0200) Europe/Kiev"), + ("Europe/Kirov", "(GMT+0300) Europe/Kirov"), + ("Europe/Lisbon", "(GMT+0000) Europe/Lisbon"), + ("Europe/Ljubljana", "(GMT+0100) Europe/Ljubljana"), + ("Europe/London", "(GMT+0000) Europe/London"), + ("Europe/Luxembourg", "(GMT+0100) Europe/Luxembourg"), + ("Europe/Madrid", "(GMT+0100) Europe/Madrid"), + ("Europe/Malta", "(GMT+0100) Europe/Malta"), + ("Europe/Mariehamn", "(GMT+0200) Europe/Mariehamn"), + ("Europe/Minsk", "(GMT+0300) Europe/Minsk"), + ("Europe/Monaco", "(GMT+0100) Europe/Monaco"), + ("Europe/Moscow", "(GMT+0300) Europe/Moscow"), + ("Europe/Oslo", "(GMT+0100) Europe/Oslo"), + ("Europe/Paris", "(GMT+0100) Europe/Paris"), + ("Europe/Podgorica", "(GMT+0100) Europe/Podgorica"), + ("Europe/Prague", "(GMT+0100) Europe/Prague"), + ("Europe/Riga", "(GMT+0200) Europe/Riga"), + ("Europe/Rome", "(GMT+0100) Europe/Rome"), + ("Europe/Samara", "(GMT+0400) Europe/Samara"), + ("Europe/San_Marino", "(GMT+0100) Europe/San_Marino"), + ("Europe/Sarajevo", "(GMT+0100) Europe/Sarajevo"), + ("Europe/Saratov", "(GMT+0400) Europe/Saratov"), + ("Europe/Simferopol", "(GMT+0300) Europe/Simferopol"), + ("Europe/Skopje", "(GMT+0100) Europe/Skopje"), + ("Europe/Sofia", "(GMT+0200) Europe/Sofia"), + ("Europe/Stockholm", "(GMT+0100) Europe/Stockholm"), + ("Europe/Tallinn", "(GMT+0200) Europe/Tallinn"), + ("Europe/Tirane", "(GMT+0100) Europe/Tirane"), + ("Europe/Ulyanovsk", "(GMT+0400) Europe/Ulyanovsk"), + ("Europe/Uzhgorod", "(GMT+0200) Europe/Uzhgorod"), + ("Europe/Vaduz", "(GMT+0100) Europe/Vaduz"), + ("Europe/Vatican", "(GMT+0100) Europe/Vatican"), + ("Europe/Vienna", "(GMT+0100) Europe/Vienna"), + ("Europe/Vilnius", "(GMT+0200) Europe/Vilnius"), + ("Europe/Volgograd", "(GMT+0400) Europe/Volgograd"), + ("Europe/Warsaw", "(GMT+0100) Europe/Warsaw"), + ("Europe/Zagreb", "(GMT+0100) Europe/Zagreb"), + ("Europe/Zaporozhye", "(GMT+0200) Europe/Zaporozhye"), + ("Europe/Zurich", "(GMT+0100) Europe/Zurich"), + ("GMT", "(GMT+0000) GMT"), + ("Indian/Antananarivo", "(GMT+0300) Indian/Antananarivo"), + ("Indian/Chagos", "(GMT+0600) Indian/Chagos"), + ("Indian/Christmas", "(GMT+0700) Indian/Christmas"), + ("Indian/Cocos", "(GMT+0630) Indian/Cocos"), + ("Indian/Comoro", "(GMT+0300) Indian/Comoro"), + ("Indian/Kerguelen", "(GMT+0500) Indian/Kerguelen"), + ("Indian/Mahe", "(GMT+0400) Indian/Mahe"), + ("Indian/Maldives", "(GMT+0500) Indian/Maldives"), + ("Indian/Mauritius", "(GMT+0400) Indian/Mauritius"), + ("Indian/Mayotte", "(GMT+0300) Indian/Mayotte"), + ("Indian/Reunion", "(GMT+0400) Indian/Reunion"), + ("Pacific/Apia", "(GMT+1400) Pacific/Apia"), + ("Pacific/Auckland", "(GMT+1300) Pacific/Auckland"), + ("Pacific/Bougainville", "(GMT+1100) Pacific/Bougainville"), + ("Pacific/Chatham", "(GMT+1345) Pacific/Chatham"), + ("Pacific/Chuuk", "(GMT+1000) Pacific/Chuuk"), + ("Pacific/Easter", "(GMT-0500) Pacific/Easter"), + ("Pacific/Efate", "(GMT+1100) Pacific/Efate"), + ("Pacific/Enderbury", "(GMT+1300) 
Pacific/Enderbury"),
+                    ("Pacific/Fakaofo", "(GMT+1300) Pacific/Fakaofo"),
+                    ("Pacific/Fiji", "(GMT+1200) Pacific/Fiji"),
+                    ("Pacific/Funafuti", "(GMT+1200) Pacific/Funafuti"),
+                    ("Pacific/Galapagos", "(GMT-0600) Pacific/Galapagos"),
+                    ("Pacific/Gambier", "(GMT-0900) Pacific/Gambier"),
+                    ("Pacific/Guadalcanal", "(GMT+1100) Pacific/Guadalcanal"),
+                    ("Pacific/Guam", "(GMT+1000) Pacific/Guam"),
+                    ("Pacific/Honolulu", "(GMT-1000) Pacific/Honolulu"),
+                    ("Pacific/Kiritimati", "(GMT+1400) Pacific/Kiritimati"),
+                    ("Pacific/Kosrae", "(GMT+1100) Pacific/Kosrae"),
+                    ("Pacific/Kwajalein", "(GMT+1200) Pacific/Kwajalein"),
+                    ("Pacific/Majuro", "(GMT+1200) Pacific/Majuro"),
+                    ("Pacific/Marquesas", "(GMT-0930) Pacific/Marquesas"),
+                    ("Pacific/Midway", "(GMT-1100) Pacific/Midway"),
+                    ("Pacific/Nauru", "(GMT+1200) Pacific/Nauru"),
+                    ("Pacific/Niue", "(GMT-1100) Pacific/Niue"),
+                    ("Pacific/Norfolk", "(GMT+1200) Pacific/Norfolk"),
+                    ("Pacific/Noumea", "(GMT+1100) Pacific/Noumea"),
+                    ("Pacific/Pago_Pago", "(GMT-1100) Pacific/Pago_Pago"),
+                    ("Pacific/Palau", "(GMT+0900) Pacific/Palau"),
+                    ("Pacific/Pitcairn", "(GMT-0800) Pacific/Pitcairn"),
+                    ("Pacific/Pohnpei", "(GMT+1100) Pacific/Pohnpei"),
+                    ("Pacific/Port_Moresby", "(GMT+1000) Pacific/Port_Moresby"),
+                    ("Pacific/Rarotonga", "(GMT-1000) Pacific/Rarotonga"),
+                    ("Pacific/Saipan", "(GMT+1000) Pacific/Saipan"),
+                    ("Pacific/Tahiti", "(GMT-1000) Pacific/Tahiti"),
+                    ("Pacific/Tarawa", "(GMT+1200) Pacific/Tarawa"),
+                    ("Pacific/Tongatapu", "(GMT+1300) Pacific/Tongatapu"),
+                    ("Pacific/Wake", "(GMT+1200) Pacific/Wake"),
+                    ("Pacific/Wallis", "(GMT+1200) Pacific/Wallis"),
+                    ("US/Alaska", "(GMT-0900) US/Alaska"),
+                    ("US/Arizona", "(GMT-0700) US/Arizona"),
+                    ("US/Central", "(GMT-0600) US/Central"),
+                    ("US/Eastern", "(GMT-0500) US/Eastern"),
+                    ("US/Hawaii", "(GMT-1000) US/Hawaii"),
+                    ("US/Mountain", "(GMT-0700) US/Mountain"),
+                    ("US/Pacific", "(GMT-0800) US/Pacific"),
+                    ("UTC", "(GMT+0000) UTC"),
+                ],
+                default="America/New_York",
+                max_length=100,
+            ),
         ),
     ]
diff --git a/apps/profile/migrations/0008_profile_paypal_sub_id.py b/apps/profile/migrations/0008_profile_paypal_sub_id.py
index 1b700d17c4..a974b2cc92 100644
--- a/apps/profile/migrations/0008_profile_paypal_sub_id.py
+++ b/apps/profile/migrations/0008_profile_paypal_sub_id.py
@@ -4,15 +4,14 @@
 
 
 class Migration(migrations.Migration):
-
     dependencies = [
-        ('profile', '0007_auto_20220125_2108'),
+        ("profile", "0007_auto_20220125_2108"),
     ]
 
     operations = [
         migrations.AddField(
-            model_name='profile',
-            name='paypal_sub_id',
+            model_name="profile",
+            name="paypal_sub_id",
             field=models.CharField(blank=True, max_length=24, null=True),
         ),
     ]
diff --git a/apps/profile/migrations/0009_paypalids.py b/apps/profile/migrations/0009_paypalids.py
index c181c77b33..6480de3a34 100644
--- a/apps/profile/migrations/0009_paypalids.py
+++ b/apps/profile/migrations/0009_paypalids.py
@@ -6,19 +6,29 @@
 
 
 class Migration(migrations.Migration):
-
     dependencies = [
         migrations.swappable_dependency(settings.AUTH_USER_MODEL),
-        ('profile', '0008_profile_paypal_sub_id'),
+        ("profile", "0008_profile_paypal_sub_id"),
     ]
 
     operations = [
         migrations.CreateModel(
-            name='PaypalIds',
+            name="PaypalIds",
             fields=[
-                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
-                ('paypal_sub_id', models.CharField(blank=True, max_length=24, null=True)),
-                ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='paypal_ids', to=settings.AUTH_USER_MODEL)),
+                (
+                    "id",
+                    models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"),
+                ),
+                ("paypal_sub_id", models.CharField(blank=True, max_length=24, null=True)),
+                (
+                    "user",
+                    models.ForeignKey(
+                        null=True,
+                        on_delete=django.db.models.deletion.CASCADE,
+                        related_name="paypal_ids",
+                        to=settings.AUTH_USER_MODEL,
+                    ),
+                ),
             ],
         ),
     ]
diff --git a/apps/profile/migrations/0010_profile_active_provider.py b/apps/profile/migrations/0010_profile_active_provider.py
index 53c49773b4..c880e37a90 100644
--- a/apps/profile/migrations/0010_profile_active_provider.py
+++ b/apps/profile/migrations/0010_profile_active_provider.py
@@ -4,15 +4,14 @@
 
 
 class Migration(migrations.Migration):
-
     dependencies = [
-        ('profile', '0009_paypalids'),
+        ("profile", "0009_paypalids"),
     ]
 
     operations = [
         migrations.AddField(
-            model_name='profile',
-            name='active_provider',
+            model_name="profile",
+            name="active_provider",
             field=models.CharField(blank=True, max_length=24, null=True),
         ),
     ]
diff --git a/apps/profile/migrations/0011_auto_20220408_1908.py b/apps/profile/migrations/0011_auto_20220408_1908.py
index 76f8bc6b12..187fbf0b05 100644
--- a/apps/profile/migrations/0011_auto_20220408_1908.py
+++ b/apps/profile/migrations/0011_auto_20220408_1908.py
@@ -5,25 +5,469 @@
 
 
 class Migration(migrations.Migration):
-
     dependencies = [
-        ('profile', '0010_profile_active_provider'),
+        ("profile", "0010_profile_active_provider"),
     ]
 
     operations = [
         migrations.AddField(
-            model_name='paymenthistory',
-            name='refunded',
+            model_name="paymenthistory",
+            name="refunded",
             field=models.BooleanField(blank=True, null=True),
         ),
         migrations.AlterField(
-            model_name='profile',
-            name='feed_pane_size',
+            model_name="profile",
+            name="feed_pane_size",
             field=models.IntegerField(default=282),
         ),
         migrations.AlterField(
-            model_name='profile',
-            name='timezone',
-            field=vendor.timezones.fields.TimeZoneField(choices=[('Africa/Abidjan', '(GMT+0000) Africa/Abidjan'), ('Africa/Accra', '(GMT+0000) Africa/Accra'), ('Africa/Addis_Ababa', '(GMT+0300) Africa/Addis_Ababa'), ('Africa/Algiers', '(GMT+0100) Africa/Algiers'), ('Africa/Asmara', '(GMT+0300) Africa/Asmara'), ('Africa/Bamako', '(GMT+0000) Africa/Bamako'), ('Africa/Bangui', '(GMT+0100) Africa/Bangui'), ('Africa/Banjul', '(GMT+0000) Africa/Banjul'), ('Africa/Bissau', '(GMT+0000) Africa/Bissau'), ('Africa/Blantyre', '(GMT+0200) Africa/Blantyre'), ('Africa/Brazzaville', '(GMT+0100) Africa/Brazzaville'), ('Africa/Bujumbura', '(GMT+0200) Africa/Bujumbura'), ('Africa/Cairo', '(GMT+0200) Africa/Cairo'), ('Africa/Casablanca', '(GMT+0000) Africa/Casablanca'), ('Africa/Ceuta', '(GMT+0200) Africa/Ceuta'), ('Africa/Conakry', '(GMT+0000) Africa/Conakry'), ('Africa/Dakar', '(GMT+0000) Africa/Dakar'), ('Africa/Dar_es_Salaam', '(GMT+0300) Africa/Dar_es_Salaam'), ('Africa/Djibouti', '(GMT+0300) Africa/Djibouti'), ('Africa/Douala', '(GMT+0100) Africa/Douala'), ('Africa/El_Aaiun', '(GMT+0000) Africa/El_Aaiun'), ('Africa/Freetown', '(GMT+0000) Africa/Freetown'), ('Africa/Gaborone', '(GMT+0200) Africa/Gaborone'), ('Africa/Harare', '(GMT+0200) Africa/Harare'), ('Africa/Johannesburg', '(GMT+0200) Africa/Johannesburg'), ('Africa/Juba', '(GMT+0300) Africa/Juba'), ('Africa/Kampala', '(GMT+0300) Africa/Kampala'), ('Africa/Khartoum', '(GMT+0200) Africa/Khartoum'), ('Africa/Kigali', '(GMT+0200) Africa/Kigali'), ('Africa/Kinshasa', '(GMT+0100) Africa/Kinshasa'), ('Africa/Lagos', '(GMT+0100) Africa/Lagos'), ('Africa/Libreville', '(GMT+0100) Africa/Libreville'), ('Africa/Lome', '(GMT+0000) Africa/Lome'), ('Africa/Luanda',
'(GMT+0100) Africa/Luanda'), ('Africa/Lubumbashi', '(GMT+0200) Africa/Lubumbashi'), ('Africa/Lusaka', '(GMT+0200) Africa/Lusaka'), ('Africa/Malabo', '(GMT+0100) Africa/Malabo'), ('Africa/Maputo', '(GMT+0200) Africa/Maputo'), ('Africa/Maseru', '(GMT+0200) Africa/Maseru'), ('Africa/Mbabane', '(GMT+0200) Africa/Mbabane'), ('Africa/Mogadishu', '(GMT+0300) Africa/Mogadishu'), ('Africa/Monrovia', '(GMT+0000) Africa/Monrovia'), ('Africa/Nairobi', '(GMT+0300) Africa/Nairobi'), ('Africa/Ndjamena', '(GMT+0100) Africa/Ndjamena'), ('Africa/Niamey', '(GMT+0100) Africa/Niamey'), ('Africa/Nouakchott', '(GMT+0000) Africa/Nouakchott'), ('Africa/Ouagadougou', '(GMT+0000) Africa/Ouagadougou'), ('Africa/Porto-Novo', '(GMT+0100) Africa/Porto-Novo'), ('Africa/Sao_Tome', '(GMT+0000) Africa/Sao_Tome'), ('Africa/Tripoli', '(GMT+0200) Africa/Tripoli'), ('Africa/Tunis', '(GMT+0100) Africa/Tunis'), ('Africa/Windhoek', '(GMT+0200) Africa/Windhoek'), ('America/Adak', '(GMT-0900) America/Adak'), ('America/Anchorage', '(GMT-0800) America/Anchorage'), ('America/Anguilla', '(GMT-0400) America/Anguilla'), ('America/Antigua', '(GMT-0400) America/Antigua'), ('America/Araguaina', '(GMT-0300) America/Araguaina'), ('America/Argentina/Buenos_Aires', '(GMT-0300) America/Argentina/Buenos_Aires'), ('America/Argentina/Catamarca', '(GMT-0300) America/Argentina/Catamarca'), ('America/Argentina/Cordoba', '(GMT-0300) America/Argentina/Cordoba'), ('America/Argentina/Jujuy', '(GMT-0300) America/Argentina/Jujuy'), ('America/Argentina/La_Rioja', '(GMT-0300) America/Argentina/La_Rioja'), ('America/Argentina/Mendoza', '(GMT-0300) America/Argentina/Mendoza'), ('America/Argentina/Rio_Gallegos', '(GMT-0300) America/Argentina/Rio_Gallegos'), ('America/Argentina/Salta', '(GMT-0300) America/Argentina/Salta'), ('America/Argentina/San_Juan', '(GMT-0300) America/Argentina/San_Juan'), ('America/Argentina/San_Luis', '(GMT-0300) America/Argentina/San_Luis'), ('America/Argentina/Tucuman', '(GMT-0300) America/Argentina/Tucuman'), ('America/Argentina/Ushuaia', '(GMT-0300) America/Argentina/Ushuaia'), ('America/Aruba', '(GMT-0400) America/Aruba'), ('America/Asuncion', '(GMT-0400) America/Asuncion'), ('America/Atikokan', '(GMT-0500) America/Atikokan'), ('America/Bahia', '(GMT-0300) America/Bahia'), ('America/Bahia_Banderas', '(GMT-0500) America/Bahia_Banderas'), ('America/Barbados', '(GMT-0400) America/Barbados'), ('America/Belem', '(GMT-0300) America/Belem'), ('America/Belize', '(GMT-0600) America/Belize'), ('America/Blanc-Sablon', '(GMT-0400) America/Blanc-Sablon'), ('America/Boa_Vista', '(GMT-0400) America/Boa_Vista'), ('America/Bogota', '(GMT-0500) America/Bogota'), ('America/Boise', '(GMT-0600) America/Boise'), ('America/Cambridge_Bay', '(GMT-0600) America/Cambridge_Bay'), ('America/Campo_Grande', '(GMT-0400) America/Campo_Grande'), ('America/Cancun', '(GMT-0500) America/Cancun'), ('America/Caracas', '(GMT-0400) America/Caracas'), ('America/Cayenne', '(GMT-0300) America/Cayenne'), ('America/Cayman', '(GMT-0500) America/Cayman'), ('America/Chicago', '(GMT-0500) America/Chicago'), ('America/Chihuahua', '(GMT-0600) America/Chihuahua'), ('America/Costa_Rica', '(GMT-0600) America/Costa_Rica'), ('America/Creston', '(GMT-0700) America/Creston'), ('America/Cuiaba', '(GMT-0400) America/Cuiaba'), ('America/Curacao', '(GMT-0400) America/Curacao'), ('America/Danmarkshavn', '(GMT+0000) America/Danmarkshavn'), ('America/Dawson', '(GMT-0700) America/Dawson'), ('America/Dawson_Creek', '(GMT-0700) America/Dawson_Creek'), ('America/Denver', '(GMT-0600) America/Denver'), 
('America/Detroit', '(GMT-0400) America/Detroit'), ('America/Dominica', '(GMT-0400) America/Dominica'), ('America/Edmonton', '(GMT-0600) America/Edmonton'), ('America/Eirunepe', '(GMT-0500) America/Eirunepe'), ('America/El_Salvador', '(GMT-0600) America/El_Salvador'), ('America/Fort_Nelson', '(GMT-0700) America/Fort_Nelson'), ('America/Fortaleza', '(GMT-0300) America/Fortaleza'), ('America/Glace_Bay', '(GMT-0300) America/Glace_Bay'), ('America/Goose_Bay', '(GMT-0300) America/Goose_Bay'), ('America/Grand_Turk', '(GMT-0400) America/Grand_Turk'), ('America/Grenada', '(GMT-0400) America/Grenada'), ('America/Guadeloupe', '(GMT-0400) America/Guadeloupe'), ('America/Guatemala', '(GMT-0600) America/Guatemala'), ('America/Guayaquil', '(GMT-0500) America/Guayaquil'), ('America/Guyana', '(GMT-0400) America/Guyana'), ('America/Halifax', '(GMT-0300) America/Halifax'), ('America/Havana', '(GMT-0400) America/Havana'), ('America/Hermosillo', '(GMT-0700) America/Hermosillo'), ('America/Indiana/Indianapolis', '(GMT-0400) America/Indiana/Indianapolis'), ('America/Indiana/Knox', '(GMT-0500) America/Indiana/Knox'), ('America/Indiana/Marengo', '(GMT-0400) America/Indiana/Marengo'), ('America/Indiana/Petersburg', '(GMT-0400) America/Indiana/Petersburg'), ('America/Indiana/Tell_City', '(GMT-0500) America/Indiana/Tell_City'), ('America/Indiana/Vevay', '(GMT-0400) America/Indiana/Vevay'), ('America/Indiana/Vincennes', '(GMT-0400) America/Indiana/Vincennes'), ('America/Indiana/Winamac', '(GMT-0400) America/Indiana/Winamac'), ('America/Inuvik', '(GMT-0600) America/Inuvik'), ('America/Iqaluit', '(GMT-0400) America/Iqaluit'), ('America/Jamaica', '(GMT-0500) America/Jamaica'), ('America/Juneau', '(GMT-0800) America/Juneau'), ('America/Kentucky/Louisville', '(GMT-0400) America/Kentucky/Louisville'), ('America/Kentucky/Monticello', '(GMT-0400) America/Kentucky/Monticello'), ('America/Kralendijk', '(GMT-0400) America/Kralendijk'), ('America/La_Paz', '(GMT-0400) America/La_Paz'), ('America/Lima', '(GMT-0500) America/Lima'), ('America/Los_Angeles', '(GMT-0700) America/Los_Angeles'), ('America/Lower_Princes', '(GMT-0400) America/Lower_Princes'), ('America/Maceio', '(GMT-0300) America/Maceio'), ('America/Managua', '(GMT-0600) America/Managua'), ('America/Manaus', '(GMT-0400) America/Manaus'), ('America/Marigot', '(GMT-0400) America/Marigot'), ('America/Martinique', '(GMT-0400) America/Martinique'), ('America/Matamoros', '(GMT-0500) America/Matamoros'), ('America/Mazatlan', '(GMT-0600) America/Mazatlan'), ('America/Menominee', '(GMT-0500) America/Menominee'), ('America/Merida', '(GMT-0500) America/Merida'), ('America/Metlakatla', '(GMT-0800) America/Metlakatla'), ('America/Mexico_City', '(GMT-0500) America/Mexico_City'), ('America/Miquelon', '(GMT-0200) America/Miquelon'), ('America/Moncton', '(GMT-0300) America/Moncton'), ('America/Monterrey', '(GMT-0500) America/Monterrey'), ('America/Montevideo', '(GMT-0300) America/Montevideo'), ('America/Montserrat', '(GMT-0400) America/Montserrat'), ('America/Nassau', '(GMT-0400) America/Nassau'), ('America/New_York', '(GMT-0400) America/New_York'), ('America/Nipigon', '(GMT-0400) America/Nipigon'), ('America/Nome', '(GMT-0800) America/Nome'), ('America/Noronha', '(GMT-0200) America/Noronha'), ('America/North_Dakota/Beulah', '(GMT-0500) America/North_Dakota/Beulah'), ('America/North_Dakota/Center', '(GMT-0500) America/North_Dakota/Center'), ('America/North_Dakota/New_Salem', '(GMT-0500) America/North_Dakota/New_Salem'), ('America/Nuuk', '(GMT-0200) America/Nuuk'), ('America/Ojinaga', 
'(GMT-0600) America/Ojinaga'), ('America/Panama', '(GMT-0500) America/Panama'), ('America/Pangnirtung', '(GMT-0400) America/Pangnirtung'), ('America/Paramaribo', '(GMT-0300) America/Paramaribo'), ('America/Phoenix', '(GMT-0700) America/Phoenix'), ('America/Port-au-Prince', '(GMT-0400) America/Port-au-Prince'), ('America/Port_of_Spain', '(GMT-0400) America/Port_of_Spain'), ('America/Porto_Velho', '(GMT-0400) America/Porto_Velho'), ('America/Puerto_Rico', '(GMT-0400) America/Puerto_Rico'), ('America/Punta_Arenas', '(GMT-0300) America/Punta_Arenas'), ('America/Rainy_River', '(GMT-0500) America/Rainy_River'), ('America/Rankin_Inlet', '(GMT-0500) America/Rankin_Inlet'), ('America/Recife', '(GMT-0300) America/Recife'), ('America/Regina', '(GMT-0600) America/Regina'), ('America/Resolute', '(GMT-0500) America/Resolute'), ('America/Rio_Branco', '(GMT-0500) America/Rio_Branco'), ('America/Santarem', '(GMT-0300) America/Santarem'), ('America/Santiago', '(GMT-0400) America/Santiago'), ('America/Santo_Domingo', '(GMT-0400) America/Santo_Domingo'), ('America/Sao_Paulo', '(GMT-0300) America/Sao_Paulo'), ('America/Scoresbysund', '(GMT+0000) America/Scoresbysund'), ('America/Sitka', '(GMT-0800) America/Sitka'), ('America/St_Barthelemy', '(GMT-0400) America/St_Barthelemy'), ('America/St_Johns', '(GMT-0230) America/St_Johns'), ('America/St_Kitts', '(GMT-0400) America/St_Kitts'), ('America/St_Lucia', '(GMT-0400) America/St_Lucia'), ('America/St_Thomas', '(GMT-0400) America/St_Thomas'), ('America/St_Vincent', '(GMT-0400) America/St_Vincent'), ('America/Swift_Current', '(GMT-0600) America/Swift_Current'), ('America/Tegucigalpa', '(GMT-0600) America/Tegucigalpa'), ('America/Thule', '(GMT-0300) America/Thule'), ('America/Thunder_Bay', '(GMT-0400) America/Thunder_Bay'), ('America/Tijuana', '(GMT-0700) America/Tijuana'), ('America/Toronto', '(GMT-0400) America/Toronto'), ('America/Tortola', '(GMT-0400) America/Tortola'), ('America/Vancouver', '(GMT-0700) America/Vancouver'), ('America/Whitehorse', '(GMT-0700) America/Whitehorse'), ('America/Winnipeg', '(GMT-0500) America/Winnipeg'), ('America/Yakutat', '(GMT-0800) America/Yakutat'), ('America/Yellowknife', '(GMT-0600) America/Yellowknife'), ('Antarctica/Casey', '(GMT+1100) Antarctica/Casey'), ('Antarctica/Davis', '(GMT+0700) Antarctica/Davis'), ('Antarctica/DumontDUrville', '(GMT+1000) Antarctica/DumontDUrville'), ('Antarctica/Macquarie', '(GMT+1000) Antarctica/Macquarie'), ('Antarctica/Mawson', '(GMT+0500) Antarctica/Mawson'), ('Antarctica/McMurdo', '(GMT+1200) Antarctica/McMurdo'), ('Antarctica/Palmer', '(GMT-0300) Antarctica/Palmer'), ('Antarctica/Rothera', '(GMT-0300) Antarctica/Rothera'), ('Antarctica/Syowa', '(GMT+0300) Antarctica/Syowa'), ('Antarctica/Troll', '(GMT+0200) Antarctica/Troll'), ('Antarctica/Vostok', '(GMT+0600) Antarctica/Vostok'), ('Arctic/Longyearbyen', '(GMT+0200) Arctic/Longyearbyen'), ('Asia/Aden', '(GMT+0300) Asia/Aden'), ('Asia/Almaty', '(GMT+0600) Asia/Almaty'), ('Asia/Amman', '(GMT+0300) Asia/Amman'), ('Asia/Anadyr', '(GMT+1200) Asia/Anadyr'), ('Asia/Aqtau', '(GMT+0500) Asia/Aqtau'), ('Asia/Aqtobe', '(GMT+0500) Asia/Aqtobe'), ('Asia/Ashgabat', '(GMT+0500) Asia/Ashgabat'), ('Asia/Atyrau', '(GMT+0500) Asia/Atyrau'), ('Asia/Baghdad', '(GMT+0300) Asia/Baghdad'), ('Asia/Bahrain', '(GMT+0300) Asia/Bahrain'), ('Asia/Baku', '(GMT+0400) Asia/Baku'), ('Asia/Bangkok', '(GMT+0700) Asia/Bangkok'), ('Asia/Barnaul', '(GMT+0700) Asia/Barnaul'), ('Asia/Beirut', '(GMT+0300) Asia/Beirut'), ('Asia/Bishkek', '(GMT+0600) Asia/Bishkek'), ('Asia/Brunei', 
'(GMT+0800) Asia/Brunei'), ('Asia/Chita', '(GMT+0900) Asia/Chita'), ('Asia/Choibalsan', '(GMT+0800) Asia/Choibalsan'), ('Asia/Colombo', '(GMT+0530) Asia/Colombo'), ('Asia/Damascus', '(GMT+0300) Asia/Damascus'), ('Asia/Dhaka', '(GMT+0600) Asia/Dhaka'), ('Asia/Dili', '(GMT+0900) Asia/Dili'), ('Asia/Dubai', '(GMT+0400) Asia/Dubai'), ('Asia/Dushanbe', '(GMT+0500) Asia/Dushanbe'), ('Asia/Famagusta', '(GMT+0300) Asia/Famagusta'), ('Asia/Gaza', '(GMT+0300) Asia/Gaza'), ('Asia/Hebron', '(GMT+0300) Asia/Hebron'), ('Asia/Ho_Chi_Minh', '(GMT+0700) Asia/Ho_Chi_Minh'), ('Asia/Hong_Kong', '(GMT+0800) Asia/Hong_Kong'), ('Asia/Hovd', '(GMT+0700) Asia/Hovd'), ('Asia/Irkutsk', '(GMT+0800) Asia/Irkutsk'), ('Asia/Jakarta', '(GMT+0700) Asia/Jakarta'), ('Asia/Jayapura', '(GMT+0900) Asia/Jayapura'), ('Asia/Jerusalem', '(GMT+0300) Asia/Jerusalem'), ('Asia/Kabul', '(GMT+0430) Asia/Kabul'), ('Asia/Kamchatka', '(GMT+1200) Asia/Kamchatka'), ('Asia/Karachi', '(GMT+0500) Asia/Karachi'), ('Asia/Kathmandu', '(GMT+0545) Asia/Kathmandu'), ('Asia/Khandyga', '(GMT+0900) Asia/Khandyga'), ('Asia/Kolkata', '(GMT+0530) Asia/Kolkata'), ('Asia/Krasnoyarsk', '(GMT+0700) Asia/Krasnoyarsk'), ('Asia/Kuala_Lumpur', '(GMT+0800) Asia/Kuala_Lumpur'), ('Asia/Kuching', '(GMT+0800) Asia/Kuching'), ('Asia/Kuwait', '(GMT+0300) Asia/Kuwait'), ('Asia/Macau', '(GMT+0800) Asia/Macau'), ('Asia/Magadan', '(GMT+1100) Asia/Magadan'), ('Asia/Makassar', '(GMT+0800) Asia/Makassar'), ('Asia/Manila', '(GMT+0800) Asia/Manila'), ('Asia/Muscat', '(GMT+0400) Asia/Muscat'), ('Asia/Nicosia', '(GMT+0300) Asia/Nicosia'), ('Asia/Novokuznetsk', '(GMT+0700) Asia/Novokuznetsk'), ('Asia/Novosibirsk', '(GMT+0700) Asia/Novosibirsk'), ('Asia/Omsk', '(GMT+0600) Asia/Omsk'), ('Asia/Oral', '(GMT+0500) Asia/Oral'), ('Asia/Phnom_Penh', '(GMT+0700) Asia/Phnom_Penh'), ('Asia/Pontianak', '(GMT+0700) Asia/Pontianak'), ('Asia/Pyongyang', '(GMT+0900) Asia/Pyongyang'), ('Asia/Qatar', '(GMT+0300) Asia/Qatar'), ('Asia/Qostanay', '(GMT+0600) Asia/Qostanay'), ('Asia/Qyzylorda', '(GMT+0500) Asia/Qyzylorda'), ('Asia/Riyadh', '(GMT+0300) Asia/Riyadh'), ('Asia/Sakhalin', '(GMT+1100) Asia/Sakhalin'), ('Asia/Samarkand', '(GMT+0500) Asia/Samarkand'), ('Asia/Seoul', '(GMT+0900) Asia/Seoul'), ('Asia/Shanghai', '(GMT+0800) Asia/Shanghai'), ('Asia/Singapore', '(GMT+0800) Asia/Singapore'), ('Asia/Srednekolymsk', '(GMT+1100) Asia/Srednekolymsk'), ('Asia/Taipei', '(GMT+0800) Asia/Taipei'), ('Asia/Tashkent', '(GMT+0500) Asia/Tashkent'), ('Asia/Tbilisi', '(GMT+0400) Asia/Tbilisi'), ('Asia/Tehran', '(GMT+0430) Asia/Tehran'), ('Asia/Thimphu', '(GMT+0600) Asia/Thimphu'), ('Asia/Tokyo', '(GMT+0900) Asia/Tokyo'), ('Asia/Tomsk', '(GMT+0700) Asia/Tomsk'), ('Asia/Ulaanbaatar', '(GMT+0800) Asia/Ulaanbaatar'), ('Asia/Urumqi', '(GMT+0600) Asia/Urumqi'), ('Asia/Ust-Nera', '(GMT+1000) Asia/Ust-Nera'), ('Asia/Vientiane', '(GMT+0700) Asia/Vientiane'), ('Asia/Vladivostok', '(GMT+1000) Asia/Vladivostok'), ('Asia/Yakutsk', '(GMT+0900) Asia/Yakutsk'), ('Asia/Yangon', '(GMT+0630) Asia/Yangon'), ('Asia/Yekaterinburg', '(GMT+0500) Asia/Yekaterinburg'), ('Asia/Yerevan', '(GMT+0400) Asia/Yerevan'), ('Atlantic/Azores', '(GMT+0000) Atlantic/Azores'), ('Atlantic/Bermuda', '(GMT-0300) Atlantic/Bermuda'), ('Atlantic/Canary', '(GMT+0100) Atlantic/Canary'), ('Atlantic/Cape_Verde', '(GMT-0100) Atlantic/Cape_Verde'), ('Atlantic/Faroe', '(GMT+0100) Atlantic/Faroe'), ('Atlantic/Madeira', '(GMT+0100) Atlantic/Madeira'), ('Atlantic/Reykjavik', '(GMT+0000) Atlantic/Reykjavik'), ('Atlantic/South_Georgia', '(GMT-0200) 
Atlantic/South_Georgia'), ('Atlantic/St_Helena', '(GMT+0000) Atlantic/St_Helena'), ('Atlantic/Stanley', '(GMT-0300) Atlantic/Stanley'), ('Australia/Adelaide', '(GMT+0930) Australia/Adelaide'), ('Australia/Brisbane', '(GMT+1000) Australia/Brisbane'), ('Australia/Broken_Hill', '(GMT+0930) Australia/Broken_Hill'), ('Australia/Currie', '(GMT+1000) Australia/Currie'), ('Australia/Darwin', '(GMT+0930) Australia/Darwin'), ('Australia/Eucla', '(GMT+0845) Australia/Eucla'), ('Australia/Hobart', '(GMT+1000) Australia/Hobart'), ('Australia/Lindeman', '(GMT+1000) Australia/Lindeman'), ('Australia/Lord_Howe', '(GMT+1030) Australia/Lord_Howe'), ('Australia/Melbourne', '(GMT+1000) Australia/Melbourne'), ('Australia/Perth', '(GMT+0800) Australia/Perth'), ('Australia/Sydney', '(GMT+1000) Australia/Sydney'), ('Canada/Atlantic', '(GMT-0300) Canada/Atlantic'), ('Canada/Central', '(GMT-0500) Canada/Central'), ('Canada/Eastern', '(GMT-0400) Canada/Eastern'), ('Canada/Mountain', '(GMT-0600) Canada/Mountain'), ('Canada/Newfoundland', '(GMT-0230) Canada/Newfoundland'), ('Canada/Pacific', '(GMT-0700) Canada/Pacific'), ('Europe/Amsterdam', '(GMT+0200) Europe/Amsterdam'), ('Europe/Andorra', '(GMT+0200) Europe/Andorra'), ('Europe/Astrakhan', '(GMT+0400) Europe/Astrakhan'), ('Europe/Athens', '(GMT+0300) Europe/Athens'), ('Europe/Belgrade', '(GMT+0200) Europe/Belgrade'), ('Europe/Berlin', '(GMT+0200) Europe/Berlin'), ('Europe/Bratislava', '(GMT+0200) Europe/Bratislava'), ('Europe/Brussels', '(GMT+0200) Europe/Brussels'), ('Europe/Bucharest', '(GMT+0300) Europe/Bucharest'), ('Europe/Budapest', '(GMT+0200) Europe/Budapest'), ('Europe/Busingen', '(GMT+0200) Europe/Busingen'), ('Europe/Chisinau', '(GMT+0300) Europe/Chisinau'), ('Europe/Copenhagen', '(GMT+0200) Europe/Copenhagen'), ('Europe/Dublin', '(GMT+0100) Europe/Dublin'), ('Europe/Gibraltar', '(GMT+0200) Europe/Gibraltar'), ('Europe/Guernsey', '(GMT+0100) Europe/Guernsey'), ('Europe/Helsinki', '(GMT+0300) Europe/Helsinki'), ('Europe/Isle_of_Man', '(GMT+0100) Europe/Isle_of_Man'), ('Europe/Istanbul', '(GMT+0300) Europe/Istanbul'), ('Europe/Jersey', '(GMT+0100) Europe/Jersey'), ('Europe/Kaliningrad', '(GMT+0200) Europe/Kaliningrad'), ('Europe/Kiev', '(GMT+0300) Europe/Kiev'), ('Europe/Kirov', '(GMT+0300) Europe/Kirov'), ('Europe/Lisbon', '(GMT+0100) Europe/Lisbon'), ('Europe/Ljubljana', '(GMT+0200) Europe/Ljubljana'), ('Europe/London', '(GMT+0100) Europe/London'), ('Europe/Luxembourg', '(GMT+0200) Europe/Luxembourg'), ('Europe/Madrid', '(GMT+0200) Europe/Madrid'), ('Europe/Malta', '(GMT+0200) Europe/Malta'), ('Europe/Mariehamn', '(GMT+0300) Europe/Mariehamn'), ('Europe/Minsk', '(GMT+0300) Europe/Minsk'), ('Europe/Monaco', '(GMT+0200) Europe/Monaco'), ('Europe/Moscow', '(GMT+0300) Europe/Moscow'), ('Europe/Oslo', '(GMT+0200) Europe/Oslo'), ('Europe/Paris', '(GMT+0200) Europe/Paris'), ('Europe/Podgorica', '(GMT+0200) Europe/Podgorica'), ('Europe/Prague', '(GMT+0200) Europe/Prague'), ('Europe/Riga', '(GMT+0300) Europe/Riga'), ('Europe/Rome', '(GMT+0200) Europe/Rome'), ('Europe/Samara', '(GMT+0400) Europe/Samara'), ('Europe/San_Marino', '(GMT+0200) Europe/San_Marino'), ('Europe/Sarajevo', '(GMT+0200) Europe/Sarajevo'), ('Europe/Saratov', '(GMT+0400) Europe/Saratov'), ('Europe/Simferopol', '(GMT+0300) Europe/Simferopol'), ('Europe/Skopje', '(GMT+0200) Europe/Skopje'), ('Europe/Sofia', '(GMT+0300) Europe/Sofia'), ('Europe/Stockholm', '(GMT+0200) Europe/Stockholm'), ('Europe/Tallinn', '(GMT+0300) Europe/Tallinn'), ('Europe/Tirane', '(GMT+0200) Europe/Tirane'), 
('Europe/Ulyanovsk', '(GMT+0400) Europe/Ulyanovsk'), ('Europe/Uzhgorod', '(GMT+0300) Europe/Uzhgorod'), ('Europe/Vaduz', '(GMT+0200) Europe/Vaduz'), ('Europe/Vatican', '(GMT+0200) Europe/Vatican'), ('Europe/Vienna', '(GMT+0200) Europe/Vienna'), ('Europe/Vilnius', '(GMT+0300) Europe/Vilnius'), ('Europe/Volgograd', '(GMT+0400) Europe/Volgograd'), ('Europe/Warsaw', '(GMT+0200) Europe/Warsaw'), ('Europe/Zagreb', '(GMT+0200) Europe/Zagreb'), ('Europe/Zaporozhye', '(GMT+0300) Europe/Zaporozhye'), ('Europe/Zurich', '(GMT+0200) Europe/Zurich'), ('GMT', '(GMT+0000) GMT'), ('Indian/Antananarivo', '(GMT+0300) Indian/Antananarivo'), ('Indian/Chagos', '(GMT+0600) Indian/Chagos'), ('Indian/Christmas', '(GMT+0700) Indian/Christmas'), ('Indian/Cocos', '(GMT+0630) Indian/Cocos'), ('Indian/Comoro', '(GMT+0300) Indian/Comoro'), ('Indian/Kerguelen', '(GMT+0500) Indian/Kerguelen'), ('Indian/Mahe', '(GMT+0400) Indian/Mahe'), ('Indian/Maldives', '(GMT+0500) Indian/Maldives'), ('Indian/Mauritius', '(GMT+0400) Indian/Mauritius'), ('Indian/Mayotte', '(GMT+0300) Indian/Mayotte'), ('Indian/Reunion', '(GMT+0400) Indian/Reunion'), ('Pacific/Apia', '(GMT+1300) Pacific/Apia'), ('Pacific/Auckland', '(GMT+1200) Pacific/Auckland'), ('Pacific/Bougainville', '(GMT+1100) Pacific/Bougainville'), ('Pacific/Chatham', '(GMT+1245) Pacific/Chatham'), ('Pacific/Chuuk', '(GMT+1000) Pacific/Chuuk'), ('Pacific/Easter', '(GMT-0600) Pacific/Easter'), ('Pacific/Efate', '(GMT+1100) Pacific/Efate'), ('Pacific/Enderbury', '(GMT+1300) Pacific/Enderbury'), ('Pacific/Fakaofo', '(GMT+1300) Pacific/Fakaofo'), ('Pacific/Fiji', '(GMT+1200) Pacific/Fiji'), ('Pacific/Funafuti', '(GMT+1200) Pacific/Funafuti'), ('Pacific/Galapagos', '(GMT-0600) Pacific/Galapagos'), ('Pacific/Gambier', '(GMT-0900) Pacific/Gambier'), ('Pacific/Guadalcanal', '(GMT+1100) Pacific/Guadalcanal'), ('Pacific/Guam', '(GMT+1000) Pacific/Guam'), ('Pacific/Honolulu', '(GMT-1000) Pacific/Honolulu'), ('Pacific/Kiritimati', '(GMT+1400) Pacific/Kiritimati'), ('Pacific/Kosrae', '(GMT+1100) Pacific/Kosrae'), ('Pacific/Kwajalein', '(GMT+1200) Pacific/Kwajalein'), ('Pacific/Majuro', '(GMT+1200) Pacific/Majuro'), ('Pacific/Marquesas', '(GMT-0930) Pacific/Marquesas'), ('Pacific/Midway', '(GMT-1100) Pacific/Midway'), ('Pacific/Nauru', '(GMT+1200) Pacific/Nauru'), ('Pacific/Niue', '(GMT-1100) Pacific/Niue'), ('Pacific/Norfolk', '(GMT+1100) Pacific/Norfolk'), ('Pacific/Noumea', '(GMT+1100) Pacific/Noumea'), ('Pacific/Pago_Pago', '(GMT-1100) Pacific/Pago_Pago'), ('Pacific/Palau', '(GMT+0900) Pacific/Palau'), ('Pacific/Pitcairn', '(GMT-0800) Pacific/Pitcairn'), ('Pacific/Pohnpei', '(GMT+1100) Pacific/Pohnpei'), ('Pacific/Port_Moresby', '(GMT+1000) Pacific/Port_Moresby'), ('Pacific/Rarotonga', '(GMT-1000) Pacific/Rarotonga'), ('Pacific/Saipan', '(GMT+1000) Pacific/Saipan'), ('Pacific/Tahiti', '(GMT-1000) Pacific/Tahiti'), ('Pacific/Tarawa', '(GMT+1200) Pacific/Tarawa'), ('Pacific/Tongatapu', '(GMT+1300) Pacific/Tongatapu'), ('Pacific/Wake', '(GMT+1200) Pacific/Wake'), ('Pacific/Wallis', '(GMT+1200) Pacific/Wallis'), ('US/Alaska', '(GMT-0800) US/Alaska'), ('US/Arizona', '(GMT-0700) US/Arizona'), ('US/Central', '(GMT-0500) US/Central'), ('US/Eastern', '(GMT-0400) US/Eastern'), ('US/Hawaii', '(GMT-1000) US/Hawaii'), ('US/Mountain', '(GMT-0600) US/Mountain'), ('US/Pacific', '(GMT-0700) US/Pacific'), ('UTC', '(GMT+0000) UTC')], default='America/New_York', max_length=100), + model_name="profile", + name="timezone", + field=vendor.timezones.fields.TimeZoneField( + choices=[ + ("Africa/Abidjan", 
"(GMT+0000) Africa/Abidjan"), + ("Africa/Accra", "(GMT+0000) Africa/Accra"), + ("Africa/Addis_Ababa", "(GMT+0300) Africa/Addis_Ababa"), + ("Africa/Algiers", "(GMT+0100) Africa/Algiers"), + ("Africa/Asmara", "(GMT+0300) Africa/Asmara"), + ("Africa/Bamako", "(GMT+0000) Africa/Bamako"), + ("Africa/Bangui", "(GMT+0100) Africa/Bangui"), + ("Africa/Banjul", "(GMT+0000) Africa/Banjul"), + ("Africa/Bissau", "(GMT+0000) Africa/Bissau"), + ("Africa/Blantyre", "(GMT+0200) Africa/Blantyre"), + ("Africa/Brazzaville", "(GMT+0100) Africa/Brazzaville"), + ("Africa/Bujumbura", "(GMT+0200) Africa/Bujumbura"), + ("Africa/Cairo", "(GMT+0200) Africa/Cairo"), + ("Africa/Casablanca", "(GMT+0000) Africa/Casablanca"), + ("Africa/Ceuta", "(GMT+0200) Africa/Ceuta"), + ("Africa/Conakry", "(GMT+0000) Africa/Conakry"), + ("Africa/Dakar", "(GMT+0000) Africa/Dakar"), + ("Africa/Dar_es_Salaam", "(GMT+0300) Africa/Dar_es_Salaam"), + ("Africa/Djibouti", "(GMT+0300) Africa/Djibouti"), + ("Africa/Douala", "(GMT+0100) Africa/Douala"), + ("Africa/El_Aaiun", "(GMT+0000) Africa/El_Aaiun"), + ("Africa/Freetown", "(GMT+0000) Africa/Freetown"), + ("Africa/Gaborone", "(GMT+0200) Africa/Gaborone"), + ("Africa/Harare", "(GMT+0200) Africa/Harare"), + ("Africa/Johannesburg", "(GMT+0200) Africa/Johannesburg"), + ("Africa/Juba", "(GMT+0300) Africa/Juba"), + ("Africa/Kampala", "(GMT+0300) Africa/Kampala"), + ("Africa/Khartoum", "(GMT+0200) Africa/Khartoum"), + ("Africa/Kigali", "(GMT+0200) Africa/Kigali"), + ("Africa/Kinshasa", "(GMT+0100) Africa/Kinshasa"), + ("Africa/Lagos", "(GMT+0100) Africa/Lagos"), + ("Africa/Libreville", "(GMT+0100) Africa/Libreville"), + ("Africa/Lome", "(GMT+0000) Africa/Lome"), + ("Africa/Luanda", "(GMT+0100) Africa/Luanda"), + ("Africa/Lubumbashi", "(GMT+0200) Africa/Lubumbashi"), + ("Africa/Lusaka", "(GMT+0200) Africa/Lusaka"), + ("Africa/Malabo", "(GMT+0100) Africa/Malabo"), + ("Africa/Maputo", "(GMT+0200) Africa/Maputo"), + ("Africa/Maseru", "(GMT+0200) Africa/Maseru"), + ("Africa/Mbabane", "(GMT+0200) Africa/Mbabane"), + ("Africa/Mogadishu", "(GMT+0300) Africa/Mogadishu"), + ("Africa/Monrovia", "(GMT+0000) Africa/Monrovia"), + ("Africa/Nairobi", "(GMT+0300) Africa/Nairobi"), + ("Africa/Ndjamena", "(GMT+0100) Africa/Ndjamena"), + ("Africa/Niamey", "(GMT+0100) Africa/Niamey"), + ("Africa/Nouakchott", "(GMT+0000) Africa/Nouakchott"), + ("Africa/Ouagadougou", "(GMT+0000) Africa/Ouagadougou"), + ("Africa/Porto-Novo", "(GMT+0100) Africa/Porto-Novo"), + ("Africa/Sao_Tome", "(GMT+0000) Africa/Sao_Tome"), + ("Africa/Tripoli", "(GMT+0200) Africa/Tripoli"), + ("Africa/Tunis", "(GMT+0100) Africa/Tunis"), + ("Africa/Windhoek", "(GMT+0200) Africa/Windhoek"), + ("America/Adak", "(GMT-0900) America/Adak"), + ("America/Anchorage", "(GMT-0800) America/Anchorage"), + ("America/Anguilla", "(GMT-0400) America/Anguilla"), + ("America/Antigua", "(GMT-0400) America/Antigua"), + ("America/Araguaina", "(GMT-0300) America/Araguaina"), + ("America/Argentina/Buenos_Aires", "(GMT-0300) America/Argentina/Buenos_Aires"), + ("America/Argentina/Catamarca", "(GMT-0300) America/Argentina/Catamarca"), + ("America/Argentina/Cordoba", "(GMT-0300) America/Argentina/Cordoba"), + ("America/Argentina/Jujuy", "(GMT-0300) America/Argentina/Jujuy"), + ("America/Argentina/La_Rioja", "(GMT-0300) America/Argentina/La_Rioja"), + ("America/Argentina/Mendoza", "(GMT-0300) America/Argentina/Mendoza"), + ("America/Argentina/Rio_Gallegos", "(GMT-0300) America/Argentina/Rio_Gallegos"), + ("America/Argentina/Salta", "(GMT-0300) America/Argentina/Salta"), + 
("America/Argentina/San_Juan", "(GMT-0300) America/Argentina/San_Juan"), + ("America/Argentina/San_Luis", "(GMT-0300) America/Argentina/San_Luis"), + ("America/Argentina/Tucuman", "(GMT-0300) America/Argentina/Tucuman"), + ("America/Argentina/Ushuaia", "(GMT-0300) America/Argentina/Ushuaia"), + ("America/Aruba", "(GMT-0400) America/Aruba"), + ("America/Asuncion", "(GMT-0400) America/Asuncion"), + ("America/Atikokan", "(GMT-0500) America/Atikokan"), + ("America/Bahia", "(GMT-0300) America/Bahia"), + ("America/Bahia_Banderas", "(GMT-0500) America/Bahia_Banderas"), + ("America/Barbados", "(GMT-0400) America/Barbados"), + ("America/Belem", "(GMT-0300) America/Belem"), + ("America/Belize", "(GMT-0600) America/Belize"), + ("America/Blanc-Sablon", "(GMT-0400) America/Blanc-Sablon"), + ("America/Boa_Vista", "(GMT-0400) America/Boa_Vista"), + ("America/Bogota", "(GMT-0500) America/Bogota"), + ("America/Boise", "(GMT-0600) America/Boise"), + ("America/Cambridge_Bay", "(GMT-0600) America/Cambridge_Bay"), + ("America/Campo_Grande", "(GMT-0400) America/Campo_Grande"), + ("America/Cancun", "(GMT-0500) America/Cancun"), + ("America/Caracas", "(GMT-0400) America/Caracas"), + ("America/Cayenne", "(GMT-0300) America/Cayenne"), + ("America/Cayman", "(GMT-0500) America/Cayman"), + ("America/Chicago", "(GMT-0500) America/Chicago"), + ("America/Chihuahua", "(GMT-0600) America/Chihuahua"), + ("America/Costa_Rica", "(GMT-0600) America/Costa_Rica"), + ("America/Creston", "(GMT-0700) America/Creston"), + ("America/Cuiaba", "(GMT-0400) America/Cuiaba"), + ("America/Curacao", "(GMT-0400) America/Curacao"), + ("America/Danmarkshavn", "(GMT+0000) America/Danmarkshavn"), + ("America/Dawson", "(GMT-0700) America/Dawson"), + ("America/Dawson_Creek", "(GMT-0700) America/Dawson_Creek"), + ("America/Denver", "(GMT-0600) America/Denver"), + ("America/Detroit", "(GMT-0400) America/Detroit"), + ("America/Dominica", "(GMT-0400) America/Dominica"), + ("America/Edmonton", "(GMT-0600) America/Edmonton"), + ("America/Eirunepe", "(GMT-0500) America/Eirunepe"), + ("America/El_Salvador", "(GMT-0600) America/El_Salvador"), + ("America/Fort_Nelson", "(GMT-0700) America/Fort_Nelson"), + ("America/Fortaleza", "(GMT-0300) America/Fortaleza"), + ("America/Glace_Bay", "(GMT-0300) America/Glace_Bay"), + ("America/Goose_Bay", "(GMT-0300) America/Goose_Bay"), + ("America/Grand_Turk", "(GMT-0400) America/Grand_Turk"), + ("America/Grenada", "(GMT-0400) America/Grenada"), + ("America/Guadeloupe", "(GMT-0400) America/Guadeloupe"), + ("America/Guatemala", "(GMT-0600) America/Guatemala"), + ("America/Guayaquil", "(GMT-0500) America/Guayaquil"), + ("America/Guyana", "(GMT-0400) America/Guyana"), + ("America/Halifax", "(GMT-0300) America/Halifax"), + ("America/Havana", "(GMT-0400) America/Havana"), + ("America/Hermosillo", "(GMT-0700) America/Hermosillo"), + ("America/Indiana/Indianapolis", "(GMT-0400) America/Indiana/Indianapolis"), + ("America/Indiana/Knox", "(GMT-0500) America/Indiana/Knox"), + ("America/Indiana/Marengo", "(GMT-0400) America/Indiana/Marengo"), + ("America/Indiana/Petersburg", "(GMT-0400) America/Indiana/Petersburg"), + ("America/Indiana/Tell_City", "(GMT-0500) America/Indiana/Tell_City"), + ("America/Indiana/Vevay", "(GMT-0400) America/Indiana/Vevay"), + ("America/Indiana/Vincennes", "(GMT-0400) America/Indiana/Vincennes"), + ("America/Indiana/Winamac", "(GMT-0400) America/Indiana/Winamac"), + ("America/Inuvik", "(GMT-0600) America/Inuvik"), + ("America/Iqaluit", "(GMT-0400) America/Iqaluit"), + ("America/Jamaica", "(GMT-0500) 
America/Jamaica"), + ("America/Juneau", "(GMT-0800) America/Juneau"), + ("America/Kentucky/Louisville", "(GMT-0400) America/Kentucky/Louisville"), + ("America/Kentucky/Monticello", "(GMT-0400) America/Kentucky/Monticello"), + ("America/Kralendijk", "(GMT-0400) America/Kralendijk"), + ("America/La_Paz", "(GMT-0400) America/La_Paz"), + ("America/Lima", "(GMT-0500) America/Lima"), + ("America/Los_Angeles", "(GMT-0700) America/Los_Angeles"), + ("America/Lower_Princes", "(GMT-0400) America/Lower_Princes"), + ("America/Maceio", "(GMT-0300) America/Maceio"), + ("America/Managua", "(GMT-0600) America/Managua"), + ("America/Manaus", "(GMT-0400) America/Manaus"), + ("America/Marigot", "(GMT-0400) America/Marigot"), + ("America/Martinique", "(GMT-0400) America/Martinique"), + ("America/Matamoros", "(GMT-0500) America/Matamoros"), + ("America/Mazatlan", "(GMT-0600) America/Mazatlan"), + ("America/Menominee", "(GMT-0500) America/Menominee"), + ("America/Merida", "(GMT-0500) America/Merida"), + ("America/Metlakatla", "(GMT-0800) America/Metlakatla"), + ("America/Mexico_City", "(GMT-0500) America/Mexico_City"), + ("America/Miquelon", "(GMT-0200) America/Miquelon"), + ("America/Moncton", "(GMT-0300) America/Moncton"), + ("America/Monterrey", "(GMT-0500) America/Monterrey"), + ("America/Montevideo", "(GMT-0300) America/Montevideo"), + ("America/Montserrat", "(GMT-0400) America/Montserrat"), + ("America/Nassau", "(GMT-0400) America/Nassau"), + ("America/New_York", "(GMT-0400) America/New_York"), + ("America/Nipigon", "(GMT-0400) America/Nipigon"), + ("America/Nome", "(GMT-0800) America/Nome"), + ("America/Noronha", "(GMT-0200) America/Noronha"), + ("America/North_Dakota/Beulah", "(GMT-0500) America/North_Dakota/Beulah"), + ("America/North_Dakota/Center", "(GMT-0500) America/North_Dakota/Center"), + ("America/North_Dakota/New_Salem", "(GMT-0500) America/North_Dakota/New_Salem"), + ("America/Nuuk", "(GMT-0200) America/Nuuk"), + ("America/Ojinaga", "(GMT-0600) America/Ojinaga"), + ("America/Panama", "(GMT-0500) America/Panama"), + ("America/Pangnirtung", "(GMT-0400) America/Pangnirtung"), + ("America/Paramaribo", "(GMT-0300) America/Paramaribo"), + ("America/Phoenix", "(GMT-0700) America/Phoenix"), + ("America/Port-au-Prince", "(GMT-0400) America/Port-au-Prince"), + ("America/Port_of_Spain", "(GMT-0400) America/Port_of_Spain"), + ("America/Porto_Velho", "(GMT-0400) America/Porto_Velho"), + ("America/Puerto_Rico", "(GMT-0400) America/Puerto_Rico"), + ("America/Punta_Arenas", "(GMT-0300) America/Punta_Arenas"), + ("America/Rainy_River", "(GMT-0500) America/Rainy_River"), + ("America/Rankin_Inlet", "(GMT-0500) America/Rankin_Inlet"), + ("America/Recife", "(GMT-0300) America/Recife"), + ("America/Regina", "(GMT-0600) America/Regina"), + ("America/Resolute", "(GMT-0500) America/Resolute"), + ("America/Rio_Branco", "(GMT-0500) America/Rio_Branco"), + ("America/Santarem", "(GMT-0300) America/Santarem"), + ("America/Santiago", "(GMT-0400) America/Santiago"), + ("America/Santo_Domingo", "(GMT-0400) America/Santo_Domingo"), + ("America/Sao_Paulo", "(GMT-0300) America/Sao_Paulo"), + ("America/Scoresbysund", "(GMT+0000) America/Scoresbysund"), + ("America/Sitka", "(GMT-0800) America/Sitka"), + ("America/St_Barthelemy", "(GMT-0400) America/St_Barthelemy"), + ("America/St_Johns", "(GMT-0230) America/St_Johns"), + ("America/St_Kitts", "(GMT-0400) America/St_Kitts"), + ("America/St_Lucia", "(GMT-0400) America/St_Lucia"), + ("America/St_Thomas", "(GMT-0400) America/St_Thomas"), + ("America/St_Vincent", "(GMT-0400) 
America/St_Vincent"), + ("America/Swift_Current", "(GMT-0600) America/Swift_Current"), + ("America/Tegucigalpa", "(GMT-0600) America/Tegucigalpa"), + ("America/Thule", "(GMT-0300) America/Thule"), + ("America/Thunder_Bay", "(GMT-0400) America/Thunder_Bay"), + ("America/Tijuana", "(GMT-0700) America/Tijuana"), + ("America/Toronto", "(GMT-0400) America/Toronto"), + ("America/Tortola", "(GMT-0400) America/Tortola"), + ("America/Vancouver", "(GMT-0700) America/Vancouver"), + ("America/Whitehorse", "(GMT-0700) America/Whitehorse"), + ("America/Winnipeg", "(GMT-0500) America/Winnipeg"), + ("America/Yakutat", "(GMT-0800) America/Yakutat"), + ("America/Yellowknife", "(GMT-0600) America/Yellowknife"), + ("Antarctica/Casey", "(GMT+1100) Antarctica/Casey"), + ("Antarctica/Davis", "(GMT+0700) Antarctica/Davis"), + ("Antarctica/DumontDUrville", "(GMT+1000) Antarctica/DumontDUrville"), + ("Antarctica/Macquarie", "(GMT+1000) Antarctica/Macquarie"), + ("Antarctica/Mawson", "(GMT+0500) Antarctica/Mawson"), + ("Antarctica/McMurdo", "(GMT+1200) Antarctica/McMurdo"), + ("Antarctica/Palmer", "(GMT-0300) Antarctica/Palmer"), + ("Antarctica/Rothera", "(GMT-0300) Antarctica/Rothera"), + ("Antarctica/Syowa", "(GMT+0300) Antarctica/Syowa"), + ("Antarctica/Troll", "(GMT+0200) Antarctica/Troll"), + ("Antarctica/Vostok", "(GMT+0600) Antarctica/Vostok"), + ("Arctic/Longyearbyen", "(GMT+0200) Arctic/Longyearbyen"), + ("Asia/Aden", "(GMT+0300) Asia/Aden"), + ("Asia/Almaty", "(GMT+0600) Asia/Almaty"), + ("Asia/Amman", "(GMT+0300) Asia/Amman"), + ("Asia/Anadyr", "(GMT+1200) Asia/Anadyr"), + ("Asia/Aqtau", "(GMT+0500) Asia/Aqtau"), + ("Asia/Aqtobe", "(GMT+0500) Asia/Aqtobe"), + ("Asia/Ashgabat", "(GMT+0500) Asia/Ashgabat"), + ("Asia/Atyrau", "(GMT+0500) Asia/Atyrau"), + ("Asia/Baghdad", "(GMT+0300) Asia/Baghdad"), + ("Asia/Bahrain", "(GMT+0300) Asia/Bahrain"), + ("Asia/Baku", "(GMT+0400) Asia/Baku"), + ("Asia/Bangkok", "(GMT+0700) Asia/Bangkok"), + ("Asia/Barnaul", "(GMT+0700) Asia/Barnaul"), + ("Asia/Beirut", "(GMT+0300) Asia/Beirut"), + ("Asia/Bishkek", "(GMT+0600) Asia/Bishkek"), + ("Asia/Brunei", "(GMT+0800) Asia/Brunei"), + ("Asia/Chita", "(GMT+0900) Asia/Chita"), + ("Asia/Choibalsan", "(GMT+0800) Asia/Choibalsan"), + ("Asia/Colombo", "(GMT+0530) Asia/Colombo"), + ("Asia/Damascus", "(GMT+0300) Asia/Damascus"), + ("Asia/Dhaka", "(GMT+0600) Asia/Dhaka"), + ("Asia/Dili", "(GMT+0900) Asia/Dili"), + ("Asia/Dubai", "(GMT+0400) Asia/Dubai"), + ("Asia/Dushanbe", "(GMT+0500) Asia/Dushanbe"), + ("Asia/Famagusta", "(GMT+0300) Asia/Famagusta"), + ("Asia/Gaza", "(GMT+0300) Asia/Gaza"), + ("Asia/Hebron", "(GMT+0300) Asia/Hebron"), + ("Asia/Ho_Chi_Minh", "(GMT+0700) Asia/Ho_Chi_Minh"), + ("Asia/Hong_Kong", "(GMT+0800) Asia/Hong_Kong"), + ("Asia/Hovd", "(GMT+0700) Asia/Hovd"), + ("Asia/Irkutsk", "(GMT+0800) Asia/Irkutsk"), + ("Asia/Jakarta", "(GMT+0700) Asia/Jakarta"), + ("Asia/Jayapura", "(GMT+0900) Asia/Jayapura"), + ("Asia/Jerusalem", "(GMT+0300) Asia/Jerusalem"), + ("Asia/Kabul", "(GMT+0430) Asia/Kabul"), + ("Asia/Kamchatka", "(GMT+1200) Asia/Kamchatka"), + ("Asia/Karachi", "(GMT+0500) Asia/Karachi"), + ("Asia/Kathmandu", "(GMT+0545) Asia/Kathmandu"), + ("Asia/Khandyga", "(GMT+0900) Asia/Khandyga"), + ("Asia/Kolkata", "(GMT+0530) Asia/Kolkata"), + ("Asia/Krasnoyarsk", "(GMT+0700) Asia/Krasnoyarsk"), + ("Asia/Kuala_Lumpur", "(GMT+0800) Asia/Kuala_Lumpur"), + ("Asia/Kuching", "(GMT+0800) Asia/Kuching"), + ("Asia/Kuwait", "(GMT+0300) Asia/Kuwait"), + ("Asia/Macau", "(GMT+0800) Asia/Macau"), + ("Asia/Magadan", "(GMT+1100) 
Asia/Magadan"), + ("Asia/Makassar", "(GMT+0800) Asia/Makassar"), + ("Asia/Manila", "(GMT+0800) Asia/Manila"), + ("Asia/Muscat", "(GMT+0400) Asia/Muscat"), + ("Asia/Nicosia", "(GMT+0300) Asia/Nicosia"), + ("Asia/Novokuznetsk", "(GMT+0700) Asia/Novokuznetsk"), + ("Asia/Novosibirsk", "(GMT+0700) Asia/Novosibirsk"), + ("Asia/Omsk", "(GMT+0600) Asia/Omsk"), + ("Asia/Oral", "(GMT+0500) Asia/Oral"), + ("Asia/Phnom_Penh", "(GMT+0700) Asia/Phnom_Penh"), + ("Asia/Pontianak", "(GMT+0700) Asia/Pontianak"), + ("Asia/Pyongyang", "(GMT+0900) Asia/Pyongyang"), + ("Asia/Qatar", "(GMT+0300) Asia/Qatar"), + ("Asia/Qostanay", "(GMT+0600) Asia/Qostanay"), + ("Asia/Qyzylorda", "(GMT+0500) Asia/Qyzylorda"), + ("Asia/Riyadh", "(GMT+0300) Asia/Riyadh"), + ("Asia/Sakhalin", "(GMT+1100) Asia/Sakhalin"), + ("Asia/Samarkand", "(GMT+0500) Asia/Samarkand"), + ("Asia/Seoul", "(GMT+0900) Asia/Seoul"), + ("Asia/Shanghai", "(GMT+0800) Asia/Shanghai"), + ("Asia/Singapore", "(GMT+0800) Asia/Singapore"), + ("Asia/Srednekolymsk", "(GMT+1100) Asia/Srednekolymsk"), + ("Asia/Taipei", "(GMT+0800) Asia/Taipei"), + ("Asia/Tashkent", "(GMT+0500) Asia/Tashkent"), + ("Asia/Tbilisi", "(GMT+0400) Asia/Tbilisi"), + ("Asia/Tehran", "(GMT+0430) Asia/Tehran"), + ("Asia/Thimphu", "(GMT+0600) Asia/Thimphu"), + ("Asia/Tokyo", "(GMT+0900) Asia/Tokyo"), + ("Asia/Tomsk", "(GMT+0700) Asia/Tomsk"), + ("Asia/Ulaanbaatar", "(GMT+0800) Asia/Ulaanbaatar"), + ("Asia/Urumqi", "(GMT+0600) Asia/Urumqi"), + ("Asia/Ust-Nera", "(GMT+1000) Asia/Ust-Nera"), + ("Asia/Vientiane", "(GMT+0700) Asia/Vientiane"), + ("Asia/Vladivostok", "(GMT+1000) Asia/Vladivostok"), + ("Asia/Yakutsk", "(GMT+0900) Asia/Yakutsk"), + ("Asia/Yangon", "(GMT+0630) Asia/Yangon"), + ("Asia/Yekaterinburg", "(GMT+0500) Asia/Yekaterinburg"), + ("Asia/Yerevan", "(GMT+0400) Asia/Yerevan"), + ("Atlantic/Azores", "(GMT+0000) Atlantic/Azores"), + ("Atlantic/Bermuda", "(GMT-0300) Atlantic/Bermuda"), + ("Atlantic/Canary", "(GMT+0100) Atlantic/Canary"), + ("Atlantic/Cape_Verde", "(GMT-0100) Atlantic/Cape_Verde"), + ("Atlantic/Faroe", "(GMT+0100) Atlantic/Faroe"), + ("Atlantic/Madeira", "(GMT+0100) Atlantic/Madeira"), + ("Atlantic/Reykjavik", "(GMT+0000) Atlantic/Reykjavik"), + ("Atlantic/South_Georgia", "(GMT-0200) Atlantic/South_Georgia"), + ("Atlantic/St_Helena", "(GMT+0000) Atlantic/St_Helena"), + ("Atlantic/Stanley", "(GMT-0300) Atlantic/Stanley"), + ("Australia/Adelaide", "(GMT+0930) Australia/Adelaide"), + ("Australia/Brisbane", "(GMT+1000) Australia/Brisbane"), + ("Australia/Broken_Hill", "(GMT+0930) Australia/Broken_Hill"), + ("Australia/Currie", "(GMT+1000) Australia/Currie"), + ("Australia/Darwin", "(GMT+0930) Australia/Darwin"), + ("Australia/Eucla", "(GMT+0845) Australia/Eucla"), + ("Australia/Hobart", "(GMT+1000) Australia/Hobart"), + ("Australia/Lindeman", "(GMT+1000) Australia/Lindeman"), + ("Australia/Lord_Howe", "(GMT+1030) Australia/Lord_Howe"), + ("Australia/Melbourne", "(GMT+1000) Australia/Melbourne"), + ("Australia/Perth", "(GMT+0800) Australia/Perth"), + ("Australia/Sydney", "(GMT+1000) Australia/Sydney"), + ("Canada/Atlantic", "(GMT-0300) Canada/Atlantic"), + ("Canada/Central", "(GMT-0500) Canada/Central"), + ("Canada/Eastern", "(GMT-0400) Canada/Eastern"), + ("Canada/Mountain", "(GMT-0600) Canada/Mountain"), + ("Canada/Newfoundland", "(GMT-0230) Canada/Newfoundland"), + ("Canada/Pacific", "(GMT-0700) Canada/Pacific"), + ("Europe/Amsterdam", "(GMT+0200) Europe/Amsterdam"), + ("Europe/Andorra", "(GMT+0200) Europe/Andorra"), + ("Europe/Astrakhan", "(GMT+0400) Europe/Astrakhan"), + 
("Europe/Athens", "(GMT+0300) Europe/Athens"), + ("Europe/Belgrade", "(GMT+0200) Europe/Belgrade"), + ("Europe/Berlin", "(GMT+0200) Europe/Berlin"), + ("Europe/Bratislava", "(GMT+0200) Europe/Bratislava"), + ("Europe/Brussels", "(GMT+0200) Europe/Brussels"), + ("Europe/Bucharest", "(GMT+0300) Europe/Bucharest"), + ("Europe/Budapest", "(GMT+0200) Europe/Budapest"), + ("Europe/Busingen", "(GMT+0200) Europe/Busingen"), + ("Europe/Chisinau", "(GMT+0300) Europe/Chisinau"), + ("Europe/Copenhagen", "(GMT+0200) Europe/Copenhagen"), + ("Europe/Dublin", "(GMT+0100) Europe/Dublin"), + ("Europe/Gibraltar", "(GMT+0200) Europe/Gibraltar"), + ("Europe/Guernsey", "(GMT+0100) Europe/Guernsey"), + ("Europe/Helsinki", "(GMT+0300) Europe/Helsinki"), + ("Europe/Isle_of_Man", "(GMT+0100) Europe/Isle_of_Man"), + ("Europe/Istanbul", "(GMT+0300) Europe/Istanbul"), + ("Europe/Jersey", "(GMT+0100) Europe/Jersey"), + ("Europe/Kaliningrad", "(GMT+0200) Europe/Kaliningrad"), + ("Europe/Kiev", "(GMT+0300) Europe/Kiev"), + ("Europe/Kirov", "(GMT+0300) Europe/Kirov"), + ("Europe/Lisbon", "(GMT+0100) Europe/Lisbon"), + ("Europe/Ljubljana", "(GMT+0200) Europe/Ljubljana"), + ("Europe/London", "(GMT+0100) Europe/London"), + ("Europe/Luxembourg", "(GMT+0200) Europe/Luxembourg"), + ("Europe/Madrid", "(GMT+0200) Europe/Madrid"), + ("Europe/Malta", "(GMT+0200) Europe/Malta"), + ("Europe/Mariehamn", "(GMT+0300) Europe/Mariehamn"), + ("Europe/Minsk", "(GMT+0300) Europe/Minsk"), + ("Europe/Monaco", "(GMT+0200) Europe/Monaco"), + ("Europe/Moscow", "(GMT+0300) Europe/Moscow"), + ("Europe/Oslo", "(GMT+0200) Europe/Oslo"), + ("Europe/Paris", "(GMT+0200) Europe/Paris"), + ("Europe/Podgorica", "(GMT+0200) Europe/Podgorica"), + ("Europe/Prague", "(GMT+0200) Europe/Prague"), + ("Europe/Riga", "(GMT+0300) Europe/Riga"), + ("Europe/Rome", "(GMT+0200) Europe/Rome"), + ("Europe/Samara", "(GMT+0400) Europe/Samara"), + ("Europe/San_Marino", "(GMT+0200) Europe/San_Marino"), + ("Europe/Sarajevo", "(GMT+0200) Europe/Sarajevo"), + ("Europe/Saratov", "(GMT+0400) Europe/Saratov"), + ("Europe/Simferopol", "(GMT+0300) Europe/Simferopol"), + ("Europe/Skopje", "(GMT+0200) Europe/Skopje"), + ("Europe/Sofia", "(GMT+0300) Europe/Sofia"), + ("Europe/Stockholm", "(GMT+0200) Europe/Stockholm"), + ("Europe/Tallinn", "(GMT+0300) Europe/Tallinn"), + ("Europe/Tirane", "(GMT+0200) Europe/Tirane"), + ("Europe/Ulyanovsk", "(GMT+0400) Europe/Ulyanovsk"), + ("Europe/Uzhgorod", "(GMT+0300) Europe/Uzhgorod"), + ("Europe/Vaduz", "(GMT+0200) Europe/Vaduz"), + ("Europe/Vatican", "(GMT+0200) Europe/Vatican"), + ("Europe/Vienna", "(GMT+0200) Europe/Vienna"), + ("Europe/Vilnius", "(GMT+0300) Europe/Vilnius"), + ("Europe/Volgograd", "(GMT+0400) Europe/Volgograd"), + ("Europe/Warsaw", "(GMT+0200) Europe/Warsaw"), + ("Europe/Zagreb", "(GMT+0200) Europe/Zagreb"), + ("Europe/Zaporozhye", "(GMT+0300) Europe/Zaporozhye"), + ("Europe/Zurich", "(GMT+0200) Europe/Zurich"), + ("GMT", "(GMT+0000) GMT"), + ("Indian/Antananarivo", "(GMT+0300) Indian/Antananarivo"), + ("Indian/Chagos", "(GMT+0600) Indian/Chagos"), + ("Indian/Christmas", "(GMT+0700) Indian/Christmas"), + ("Indian/Cocos", "(GMT+0630) Indian/Cocos"), + ("Indian/Comoro", "(GMT+0300) Indian/Comoro"), + ("Indian/Kerguelen", "(GMT+0500) Indian/Kerguelen"), + ("Indian/Mahe", "(GMT+0400) Indian/Mahe"), + ("Indian/Maldives", "(GMT+0500) Indian/Maldives"), + ("Indian/Mauritius", "(GMT+0400) Indian/Mauritius"), + ("Indian/Mayotte", "(GMT+0300) Indian/Mayotte"), + ("Indian/Reunion", "(GMT+0400) Indian/Reunion"), + ("Pacific/Apia", 
"(GMT+1300) Pacific/Apia"), + ("Pacific/Auckland", "(GMT+1200) Pacific/Auckland"), + ("Pacific/Bougainville", "(GMT+1100) Pacific/Bougainville"), + ("Pacific/Chatham", "(GMT+1245) Pacific/Chatham"), + ("Pacific/Chuuk", "(GMT+1000) Pacific/Chuuk"), + ("Pacific/Easter", "(GMT-0600) Pacific/Easter"), + ("Pacific/Efate", "(GMT+1100) Pacific/Efate"), + ("Pacific/Enderbury", "(GMT+1300) Pacific/Enderbury"), + ("Pacific/Fakaofo", "(GMT+1300) Pacific/Fakaofo"), + ("Pacific/Fiji", "(GMT+1200) Pacific/Fiji"), + ("Pacific/Funafuti", "(GMT+1200) Pacific/Funafuti"), + ("Pacific/Galapagos", "(GMT-0600) Pacific/Galapagos"), + ("Pacific/Gambier", "(GMT-0900) Pacific/Gambier"), + ("Pacific/Guadalcanal", "(GMT+1100) Pacific/Guadalcanal"), + ("Pacific/Guam", "(GMT+1000) Pacific/Guam"), + ("Pacific/Honolulu", "(GMT-1000) Pacific/Honolulu"), + ("Pacific/Kiritimati", "(GMT+1400) Pacific/Kiritimati"), + ("Pacific/Kosrae", "(GMT+1100) Pacific/Kosrae"), + ("Pacific/Kwajalein", "(GMT+1200) Pacific/Kwajalein"), + ("Pacific/Majuro", "(GMT+1200) Pacific/Majuro"), + ("Pacific/Marquesas", "(GMT-0930) Pacific/Marquesas"), + ("Pacific/Midway", "(GMT-1100) Pacific/Midway"), + ("Pacific/Nauru", "(GMT+1200) Pacific/Nauru"), + ("Pacific/Niue", "(GMT-1100) Pacific/Niue"), + ("Pacific/Norfolk", "(GMT+1100) Pacific/Norfolk"), + ("Pacific/Noumea", "(GMT+1100) Pacific/Noumea"), + ("Pacific/Pago_Pago", "(GMT-1100) Pacific/Pago_Pago"), + ("Pacific/Palau", "(GMT+0900) Pacific/Palau"), + ("Pacific/Pitcairn", "(GMT-0800) Pacific/Pitcairn"), + ("Pacific/Pohnpei", "(GMT+1100) Pacific/Pohnpei"), + ("Pacific/Port_Moresby", "(GMT+1000) Pacific/Port_Moresby"), + ("Pacific/Rarotonga", "(GMT-1000) Pacific/Rarotonga"), + ("Pacific/Saipan", "(GMT+1000) Pacific/Saipan"), + ("Pacific/Tahiti", "(GMT-1000) Pacific/Tahiti"), + ("Pacific/Tarawa", "(GMT+1200) Pacific/Tarawa"), + ("Pacific/Tongatapu", "(GMT+1300) Pacific/Tongatapu"), + ("Pacific/Wake", "(GMT+1200) Pacific/Wake"), + ("Pacific/Wallis", "(GMT+1200) Pacific/Wallis"), + ("US/Alaska", "(GMT-0800) US/Alaska"), + ("US/Arizona", "(GMT-0700) US/Arizona"), + ("US/Central", "(GMT-0500) US/Central"), + ("US/Eastern", "(GMT-0400) US/Eastern"), + ("US/Hawaii", "(GMT-1000) US/Hawaii"), + ("US/Mountain", "(GMT-0600) US/Mountain"), + ("US/Pacific", "(GMT-0700) US/Pacific"), + ("UTC", "(GMT+0000) UTC"), + ], + default="America/New_York", + max_length=100, + ), ), ] diff --git a/apps/profile/migrations/0012_auto_20220511_1710.py b/apps/profile/migrations/0012_auto_20220511_1710.py index a915e7c80b..582db6063d 100644 --- a/apps/profile/migrations/0012_auto_20220511_1710.py +++ b/apps/profile/migrations/0012_auto_20220511_1710.py @@ -5,15 +5,459 @@ class Migration(migrations.Migration): - dependencies = [ - ('profile', '0011_auto_20220408_1908'), + ("profile", "0011_auto_20220408_1908"), ] operations = [ migrations.AlterField( - model_name='profile', - name='timezone', - field=vendor.timezones.fields.TimeZoneField(choices=[('Africa/Abidjan', '(GMT+0000) Africa/Abidjan'), ('Africa/Accra', '(GMT+0000) Africa/Accra'), ('Africa/Addis_Ababa', '(GMT+0300) Africa/Addis_Ababa'), ('Africa/Algiers', '(GMT+0100) Africa/Algiers'), ('Africa/Asmara', '(GMT+0300) Africa/Asmara'), ('Africa/Bamako', '(GMT+0000) Africa/Bamako'), ('Africa/Bangui', '(GMT+0100) Africa/Bangui'), ('Africa/Banjul', '(GMT+0000) Africa/Banjul'), ('Africa/Bissau', '(GMT+0000) Africa/Bissau'), ('Africa/Blantyre', '(GMT+0200) Africa/Blantyre'), ('Africa/Brazzaville', '(GMT+0100) Africa/Brazzaville'), ('Africa/Bujumbura', '(GMT+0200) Africa/Bujumbura'), 
('Africa/Cairo', '(GMT+0200) Africa/Cairo'), ('Africa/Casablanca', '(GMT+0100) Africa/Casablanca'), ('Africa/Ceuta', '(GMT+0200) Africa/Ceuta'), ('Africa/Conakry', '(GMT+0000) Africa/Conakry'), ('Africa/Dakar', '(GMT+0000) Africa/Dakar'), ('Africa/Dar_es_Salaam', '(GMT+0300) Africa/Dar_es_Salaam'), ('Africa/Djibouti', '(GMT+0300) Africa/Djibouti'), ('Africa/Douala', '(GMT+0100) Africa/Douala'), ('Africa/El_Aaiun', '(GMT+0100) Africa/El_Aaiun'), ('Africa/Freetown', '(GMT+0000) Africa/Freetown'), ('Africa/Gaborone', '(GMT+0200) Africa/Gaborone'), ('Africa/Harare', '(GMT+0200) Africa/Harare'), ('Africa/Johannesburg', '(GMT+0200) Africa/Johannesburg'), ('Africa/Juba', '(GMT+0300) Africa/Juba'), ('Africa/Kampala', '(GMT+0300) Africa/Kampala'), ('Africa/Khartoum', '(GMT+0200) Africa/Khartoum'), ('Africa/Kigali', '(GMT+0200) Africa/Kigali'), ('Africa/Kinshasa', '(GMT+0100) Africa/Kinshasa'), ('Africa/Lagos', '(GMT+0100) Africa/Lagos'), ('Africa/Libreville', '(GMT+0100) Africa/Libreville'), ('Africa/Lome', '(GMT+0000) Africa/Lome'), ('Africa/Luanda', '(GMT+0100) Africa/Luanda'), ('Africa/Lubumbashi', '(GMT+0200) Africa/Lubumbashi'), ('Africa/Lusaka', '(GMT+0200) Africa/Lusaka'), ('Africa/Malabo', '(GMT+0100) Africa/Malabo'), ('Africa/Maputo', '(GMT+0200) Africa/Maputo'), ('Africa/Maseru', '(GMT+0200) Africa/Maseru'), ('Africa/Mbabane', '(GMT+0200) Africa/Mbabane'), ('Africa/Mogadishu', '(GMT+0300) Africa/Mogadishu'), ('Africa/Monrovia', '(GMT+0000) Africa/Monrovia'), ('Africa/Nairobi', '(GMT+0300) Africa/Nairobi'), ('Africa/Ndjamena', '(GMT+0100) Africa/Ndjamena'), ('Africa/Niamey', '(GMT+0100) Africa/Niamey'), ('Africa/Nouakchott', '(GMT+0000) Africa/Nouakchott'), ('Africa/Ouagadougou', '(GMT+0000) Africa/Ouagadougou'), ('Africa/Porto-Novo', '(GMT+0100) Africa/Porto-Novo'), ('Africa/Sao_Tome', '(GMT+0000) Africa/Sao_Tome'), ('Africa/Tripoli', '(GMT+0200) Africa/Tripoli'), ('Africa/Tunis', '(GMT+0100) Africa/Tunis'), ('Africa/Windhoek', '(GMT+0200) Africa/Windhoek'), ('America/Adak', '(GMT-0900) America/Adak'), ('America/Anchorage', '(GMT-0800) America/Anchorage'), ('America/Anguilla', '(GMT-0400) America/Anguilla'), ('America/Antigua', '(GMT-0400) America/Antigua'), ('America/Araguaina', '(GMT-0300) America/Araguaina'), ('America/Argentina/Buenos_Aires', '(GMT-0300) America/Argentina/Buenos_Aires'), ('America/Argentina/Catamarca', '(GMT-0300) America/Argentina/Catamarca'), ('America/Argentina/Cordoba', '(GMT-0300) America/Argentina/Cordoba'), ('America/Argentina/Jujuy', '(GMT-0300) America/Argentina/Jujuy'), ('America/Argentina/La_Rioja', '(GMT-0300) America/Argentina/La_Rioja'), ('America/Argentina/Mendoza', '(GMT-0300) America/Argentina/Mendoza'), ('America/Argentina/Rio_Gallegos', '(GMT-0300) America/Argentina/Rio_Gallegos'), ('America/Argentina/Salta', '(GMT-0300) America/Argentina/Salta'), ('America/Argentina/San_Juan', '(GMT-0300) America/Argentina/San_Juan'), ('America/Argentina/San_Luis', '(GMT-0300) America/Argentina/San_Luis'), ('America/Argentina/Tucuman', '(GMT-0300) America/Argentina/Tucuman'), ('America/Argentina/Ushuaia', '(GMT-0300) America/Argentina/Ushuaia'), ('America/Aruba', '(GMT-0400) America/Aruba'), ('America/Asuncion', '(GMT-0400) America/Asuncion'), ('America/Atikokan', '(GMT-0500) America/Atikokan'), ('America/Bahia', '(GMT-0300) America/Bahia'), ('America/Bahia_Banderas', '(GMT-0500) America/Bahia_Banderas'), ('America/Barbados', '(GMT-0400) America/Barbados'), ('America/Belem', '(GMT-0300) America/Belem'), ('America/Belize', '(GMT-0600) America/Belize'), 
('America/Blanc-Sablon', '(GMT-0400) America/Blanc-Sablon'), ('America/Boa_Vista', '(GMT-0400) America/Boa_Vista'), ('America/Bogota', '(GMT-0500) America/Bogota'), ('America/Boise', '(GMT-0600) America/Boise'), ('America/Cambridge_Bay', '(GMT-0600) America/Cambridge_Bay'), ('America/Campo_Grande', '(GMT-0400) America/Campo_Grande'), ('America/Cancun', '(GMT-0500) America/Cancun'), ('America/Caracas', '(GMT-0400) America/Caracas'), ('America/Cayenne', '(GMT-0300) America/Cayenne'), ('America/Cayman', '(GMT-0500) America/Cayman'), ('America/Chicago', '(GMT-0500) America/Chicago'), ('America/Chihuahua', '(GMT-0600) America/Chihuahua'), ('America/Costa_Rica', '(GMT-0600) America/Costa_Rica'), ('America/Creston', '(GMT-0700) America/Creston'), ('America/Cuiaba', '(GMT-0400) America/Cuiaba'), ('America/Curacao', '(GMT-0400) America/Curacao'), ('America/Danmarkshavn', '(GMT+0000) America/Danmarkshavn'), ('America/Dawson', '(GMT-0700) America/Dawson'), ('America/Dawson_Creek', '(GMT-0700) America/Dawson_Creek'), ('America/Denver', '(GMT-0600) America/Denver'), ('America/Detroit', '(GMT-0400) America/Detroit'), ('America/Dominica', '(GMT-0400) America/Dominica'), ('America/Edmonton', '(GMT-0600) America/Edmonton'), ('America/Eirunepe', '(GMT-0500) America/Eirunepe'), ('America/El_Salvador', '(GMT-0600) America/El_Salvador'), ('America/Fort_Nelson', '(GMT-0700) America/Fort_Nelson'), ('America/Fortaleza', '(GMT-0300) America/Fortaleza'), ('America/Glace_Bay', '(GMT-0300) America/Glace_Bay'), ('America/Goose_Bay', '(GMT-0300) America/Goose_Bay'), ('America/Grand_Turk', '(GMT-0400) America/Grand_Turk'), ('America/Grenada', '(GMT-0400) America/Grenada'), ('America/Guadeloupe', '(GMT-0400) America/Guadeloupe'), ('America/Guatemala', '(GMT-0600) America/Guatemala'), ('America/Guayaquil', '(GMT-0500) America/Guayaquil'), ('America/Guyana', '(GMT-0400) America/Guyana'), ('America/Halifax', '(GMT-0300) America/Halifax'), ('America/Havana', '(GMT-0400) America/Havana'), ('America/Hermosillo', '(GMT-0700) America/Hermosillo'), ('America/Indiana/Indianapolis', '(GMT-0400) America/Indiana/Indianapolis'), ('America/Indiana/Knox', '(GMT-0500) America/Indiana/Knox'), ('America/Indiana/Marengo', '(GMT-0400) America/Indiana/Marengo'), ('America/Indiana/Petersburg', '(GMT-0400) America/Indiana/Petersburg'), ('America/Indiana/Tell_City', '(GMT-0500) America/Indiana/Tell_City'), ('America/Indiana/Vevay', '(GMT-0400) America/Indiana/Vevay'), ('America/Indiana/Vincennes', '(GMT-0400) America/Indiana/Vincennes'), ('America/Indiana/Winamac', '(GMT-0400) America/Indiana/Winamac'), ('America/Inuvik', '(GMT-0600) America/Inuvik'), ('America/Iqaluit', '(GMT-0400) America/Iqaluit'), ('America/Jamaica', '(GMT-0500) America/Jamaica'), ('America/Juneau', '(GMT-0800) America/Juneau'), ('America/Kentucky/Louisville', '(GMT-0400) America/Kentucky/Louisville'), ('America/Kentucky/Monticello', '(GMT-0400) America/Kentucky/Monticello'), ('America/Kralendijk', '(GMT-0400) America/Kralendijk'), ('America/La_Paz', '(GMT-0400) America/La_Paz'), ('America/Lima', '(GMT-0500) America/Lima'), ('America/Los_Angeles', '(GMT-0700) America/Los_Angeles'), ('America/Lower_Princes', '(GMT-0400) America/Lower_Princes'), ('America/Maceio', '(GMT-0300) America/Maceio'), ('America/Managua', '(GMT-0600) America/Managua'), ('America/Manaus', '(GMT-0400) America/Manaus'), ('America/Marigot', '(GMT-0400) America/Marigot'), ('America/Martinique', '(GMT-0400) America/Martinique'), ('America/Matamoros', '(GMT-0500) America/Matamoros'), ('America/Mazatlan', 
'(GMT-0600) America/Mazatlan'), ('America/Menominee', '(GMT-0500) America/Menominee'), ('America/Merida', '(GMT-0500) America/Merida'), ('America/Metlakatla', '(GMT-0800) America/Metlakatla'), ('America/Mexico_City', '(GMT-0500) America/Mexico_City'), ('America/Miquelon', '(GMT-0200) America/Miquelon'), ('America/Moncton', '(GMT-0300) America/Moncton'), ('America/Monterrey', '(GMT-0500) America/Monterrey'), ('America/Montevideo', '(GMT-0300) America/Montevideo'), ('America/Montserrat', '(GMT-0400) America/Montserrat'), ('America/Nassau', '(GMT-0400) America/Nassau'), ('America/New_York', '(GMT-0400) America/New_York'), ('America/Nipigon', '(GMT-0400) America/Nipigon'), ('America/Nome', '(GMT-0800) America/Nome'), ('America/Noronha', '(GMT-0200) America/Noronha'), ('America/North_Dakota/Beulah', '(GMT-0500) America/North_Dakota/Beulah'), ('America/North_Dakota/Center', '(GMT-0500) America/North_Dakota/Center'), ('America/North_Dakota/New_Salem', '(GMT-0500) America/North_Dakota/New_Salem'), ('America/Nuuk', '(GMT-0200) America/Nuuk'), ('America/Ojinaga', '(GMT-0600) America/Ojinaga'), ('America/Panama', '(GMT-0500) America/Panama'), ('America/Pangnirtung', '(GMT-0400) America/Pangnirtung'), ('America/Paramaribo', '(GMT-0300) America/Paramaribo'), ('America/Phoenix', '(GMT-0700) America/Phoenix'), ('America/Port-au-Prince', '(GMT-0400) America/Port-au-Prince'), ('America/Port_of_Spain', '(GMT-0400) America/Port_of_Spain'), ('America/Porto_Velho', '(GMT-0400) America/Porto_Velho'), ('America/Puerto_Rico', '(GMT-0400) America/Puerto_Rico'), ('America/Punta_Arenas', '(GMT-0300) America/Punta_Arenas'), ('America/Rainy_River', '(GMT-0500) America/Rainy_River'), ('America/Rankin_Inlet', '(GMT-0500) America/Rankin_Inlet'), ('America/Recife', '(GMT-0300) America/Recife'), ('America/Regina', '(GMT-0600) America/Regina'), ('America/Resolute', '(GMT-0500) America/Resolute'), ('America/Rio_Branco', '(GMT-0500) America/Rio_Branco'), ('America/Santarem', '(GMT-0300) America/Santarem'), ('America/Santiago', '(GMT-0400) America/Santiago'), ('America/Santo_Domingo', '(GMT-0400) America/Santo_Domingo'), ('America/Sao_Paulo', '(GMT-0300) America/Sao_Paulo'), ('America/Scoresbysund', '(GMT+0000) America/Scoresbysund'), ('America/Sitka', '(GMT-0800) America/Sitka'), ('America/St_Barthelemy', '(GMT-0400) America/St_Barthelemy'), ('America/St_Johns', '(GMT-0230) America/St_Johns'), ('America/St_Kitts', '(GMT-0400) America/St_Kitts'), ('America/St_Lucia', '(GMT-0400) America/St_Lucia'), ('America/St_Thomas', '(GMT-0400) America/St_Thomas'), ('America/St_Vincent', '(GMT-0400) America/St_Vincent'), ('America/Swift_Current', '(GMT-0600) America/Swift_Current'), ('America/Tegucigalpa', '(GMT-0600) America/Tegucigalpa'), ('America/Thule', '(GMT-0300) America/Thule'), ('America/Thunder_Bay', '(GMT-0400) America/Thunder_Bay'), ('America/Tijuana', '(GMT-0700) America/Tijuana'), ('America/Toronto', '(GMT-0400) America/Toronto'), ('America/Tortola', '(GMT-0400) America/Tortola'), ('America/Vancouver', '(GMT-0700) America/Vancouver'), ('America/Whitehorse', '(GMT-0700) America/Whitehorse'), ('America/Winnipeg', '(GMT-0500) America/Winnipeg'), ('America/Yakutat', '(GMT-0800) America/Yakutat'), ('America/Yellowknife', '(GMT-0600) America/Yellowknife'), ('Antarctica/Casey', '(GMT+1100) Antarctica/Casey'), ('Antarctica/Davis', '(GMT+0700) Antarctica/Davis'), ('Antarctica/DumontDUrville', '(GMT+1000) Antarctica/DumontDUrville'), ('Antarctica/Macquarie', '(GMT+1000) Antarctica/Macquarie'), ('Antarctica/Mawson', '(GMT+0500) 
Antarctica/Mawson'), ('Antarctica/McMurdo', '(GMT+1200) Antarctica/McMurdo'), ('Antarctica/Palmer', '(GMT-0300) Antarctica/Palmer'), ('Antarctica/Rothera', '(GMT-0300) Antarctica/Rothera'), ('Antarctica/Syowa', '(GMT+0300) Antarctica/Syowa'), ('Antarctica/Troll', '(GMT+0200) Antarctica/Troll'), ('Antarctica/Vostok', '(GMT+0600) Antarctica/Vostok'), ('Arctic/Longyearbyen', '(GMT+0200) Arctic/Longyearbyen'), ('Asia/Aden', '(GMT+0300) Asia/Aden'), ('Asia/Almaty', '(GMT+0600) Asia/Almaty'), ('Asia/Amman', '(GMT+0300) Asia/Amman'), ('Asia/Anadyr', '(GMT+1200) Asia/Anadyr'), ('Asia/Aqtau', '(GMT+0500) Asia/Aqtau'), ('Asia/Aqtobe', '(GMT+0500) Asia/Aqtobe'), ('Asia/Ashgabat', '(GMT+0500) Asia/Ashgabat'), ('Asia/Atyrau', '(GMT+0500) Asia/Atyrau'), ('Asia/Baghdad', '(GMT+0300) Asia/Baghdad'), ('Asia/Bahrain', '(GMT+0300) Asia/Bahrain'), ('Asia/Baku', '(GMT+0400) Asia/Baku'), ('Asia/Bangkok', '(GMT+0700) Asia/Bangkok'), ('Asia/Barnaul', '(GMT+0700) Asia/Barnaul'), ('Asia/Beirut', '(GMT+0300) Asia/Beirut'), ('Asia/Bishkek', '(GMT+0600) Asia/Bishkek'), ('Asia/Brunei', '(GMT+0800) Asia/Brunei'), ('Asia/Chita', '(GMT+0900) Asia/Chita'), ('Asia/Choibalsan', '(GMT+0800) Asia/Choibalsan'), ('Asia/Colombo', '(GMT+0530) Asia/Colombo'), ('Asia/Damascus', '(GMT+0300) Asia/Damascus'), ('Asia/Dhaka', '(GMT+0600) Asia/Dhaka'), ('Asia/Dili', '(GMT+0900) Asia/Dili'), ('Asia/Dubai', '(GMT+0400) Asia/Dubai'), ('Asia/Dushanbe', '(GMT+0500) Asia/Dushanbe'), ('Asia/Famagusta', '(GMT+0300) Asia/Famagusta'), ('Asia/Gaza', '(GMT+0300) Asia/Gaza'), ('Asia/Hebron', '(GMT+0300) Asia/Hebron'), ('Asia/Ho_Chi_Minh', '(GMT+0700) Asia/Ho_Chi_Minh'), ('Asia/Hong_Kong', '(GMT+0800) Asia/Hong_Kong'), ('Asia/Hovd', '(GMT+0700) Asia/Hovd'), ('Asia/Irkutsk', '(GMT+0800) Asia/Irkutsk'), ('Asia/Jakarta', '(GMT+0700) Asia/Jakarta'), ('Asia/Jayapura', '(GMT+0900) Asia/Jayapura'), ('Asia/Jerusalem', '(GMT+0300) Asia/Jerusalem'), ('Asia/Kabul', '(GMT+0430) Asia/Kabul'), ('Asia/Kamchatka', '(GMT+1200) Asia/Kamchatka'), ('Asia/Karachi', '(GMT+0500) Asia/Karachi'), ('Asia/Kathmandu', '(GMT+0545) Asia/Kathmandu'), ('Asia/Khandyga', '(GMT+0900) Asia/Khandyga'), ('Asia/Kolkata', '(GMT+0530) Asia/Kolkata'), ('Asia/Krasnoyarsk', '(GMT+0700) Asia/Krasnoyarsk'), ('Asia/Kuala_Lumpur', '(GMT+0800) Asia/Kuala_Lumpur'), ('Asia/Kuching', '(GMT+0800) Asia/Kuching'), ('Asia/Kuwait', '(GMT+0300) Asia/Kuwait'), ('Asia/Macau', '(GMT+0800) Asia/Macau'), ('Asia/Magadan', '(GMT+1100) Asia/Magadan'), ('Asia/Makassar', '(GMT+0800) Asia/Makassar'), ('Asia/Manila', '(GMT+0800) Asia/Manila'), ('Asia/Muscat', '(GMT+0400) Asia/Muscat'), ('Asia/Nicosia', '(GMT+0300) Asia/Nicosia'), ('Asia/Novokuznetsk', '(GMT+0700) Asia/Novokuznetsk'), ('Asia/Novosibirsk', '(GMT+0700) Asia/Novosibirsk'), ('Asia/Omsk', '(GMT+0600) Asia/Omsk'), ('Asia/Oral', '(GMT+0500) Asia/Oral'), ('Asia/Phnom_Penh', '(GMT+0700) Asia/Phnom_Penh'), ('Asia/Pontianak', '(GMT+0700) Asia/Pontianak'), ('Asia/Pyongyang', '(GMT+0900) Asia/Pyongyang'), ('Asia/Qatar', '(GMT+0300) Asia/Qatar'), ('Asia/Qostanay', '(GMT+0600) Asia/Qostanay'), ('Asia/Qyzylorda', '(GMT+0500) Asia/Qyzylorda'), ('Asia/Riyadh', '(GMT+0300) Asia/Riyadh'), ('Asia/Sakhalin', '(GMT+1100) Asia/Sakhalin'), ('Asia/Samarkand', '(GMT+0500) Asia/Samarkand'), ('Asia/Seoul', '(GMT+0900) Asia/Seoul'), ('Asia/Shanghai', '(GMT+0800) Asia/Shanghai'), ('Asia/Singapore', '(GMT+0800) Asia/Singapore'), ('Asia/Srednekolymsk', '(GMT+1100) Asia/Srednekolymsk'), ('Asia/Taipei', '(GMT+0800) Asia/Taipei'), ('Asia/Tashkent', '(GMT+0500) Asia/Tashkent'), 
('Asia/Tbilisi', '(GMT+0400) Asia/Tbilisi'), ('Asia/Tehran', '(GMT+0430) Asia/Tehran'), ('Asia/Thimphu', '(GMT+0600) Asia/Thimphu'), ('Asia/Tokyo', '(GMT+0900) Asia/Tokyo'), ('Asia/Tomsk', '(GMT+0700) Asia/Tomsk'), ('Asia/Ulaanbaatar', '(GMT+0800) Asia/Ulaanbaatar'), ('Asia/Urumqi', '(GMT+0600) Asia/Urumqi'), ('Asia/Ust-Nera', '(GMT+1000) Asia/Ust-Nera'), ('Asia/Vientiane', '(GMT+0700) Asia/Vientiane'), ('Asia/Vladivostok', '(GMT+1000) Asia/Vladivostok'), ('Asia/Yakutsk', '(GMT+0900) Asia/Yakutsk'), ('Asia/Yangon', '(GMT+0630) Asia/Yangon'), ('Asia/Yekaterinburg', '(GMT+0500) Asia/Yekaterinburg'), ('Asia/Yerevan', '(GMT+0400) Asia/Yerevan'), ('Atlantic/Azores', '(GMT+0000) Atlantic/Azores'), ('Atlantic/Bermuda', '(GMT-0300) Atlantic/Bermuda'), ('Atlantic/Canary', '(GMT+0100) Atlantic/Canary'), ('Atlantic/Cape_Verde', '(GMT-0100) Atlantic/Cape_Verde'), ('Atlantic/Faroe', '(GMT+0100) Atlantic/Faroe'), ('Atlantic/Madeira', '(GMT+0100) Atlantic/Madeira'), ('Atlantic/Reykjavik', '(GMT+0000) Atlantic/Reykjavik'), ('Atlantic/South_Georgia', '(GMT-0200) Atlantic/South_Georgia'), ('Atlantic/St_Helena', '(GMT+0000) Atlantic/St_Helena'), ('Atlantic/Stanley', '(GMT-0300) Atlantic/Stanley'), ('Australia/Adelaide', '(GMT+0930) Australia/Adelaide'), ('Australia/Brisbane', '(GMT+1000) Australia/Brisbane'), ('Australia/Broken_Hill', '(GMT+0930) Australia/Broken_Hill'), ('Australia/Currie', '(GMT+1000) Australia/Currie'), ('Australia/Darwin', '(GMT+0930) Australia/Darwin'), ('Australia/Eucla', '(GMT+0845) Australia/Eucla'), ('Australia/Hobart', '(GMT+1000) Australia/Hobart'), ('Australia/Lindeman', '(GMT+1000) Australia/Lindeman'), ('Australia/Lord_Howe', '(GMT+1030) Australia/Lord_Howe'), ('Australia/Melbourne', '(GMT+1000) Australia/Melbourne'), ('Australia/Perth', '(GMT+0800) Australia/Perth'), ('Australia/Sydney', '(GMT+1000) Australia/Sydney'), ('Canada/Atlantic', '(GMT-0300) Canada/Atlantic'), ('Canada/Central', '(GMT-0500) Canada/Central'), ('Canada/Eastern', '(GMT-0400) Canada/Eastern'), ('Canada/Mountain', '(GMT-0600) Canada/Mountain'), ('Canada/Newfoundland', '(GMT-0230) Canada/Newfoundland'), ('Canada/Pacific', '(GMT-0700) Canada/Pacific'), ('Europe/Amsterdam', '(GMT+0200) Europe/Amsterdam'), ('Europe/Andorra', '(GMT+0200) Europe/Andorra'), ('Europe/Astrakhan', '(GMT+0400) Europe/Astrakhan'), ('Europe/Athens', '(GMT+0300) Europe/Athens'), ('Europe/Belgrade', '(GMT+0200) Europe/Belgrade'), ('Europe/Berlin', '(GMT+0200) Europe/Berlin'), ('Europe/Bratislava', '(GMT+0200) Europe/Bratislava'), ('Europe/Brussels', '(GMT+0200) Europe/Brussels'), ('Europe/Bucharest', '(GMT+0300) Europe/Bucharest'), ('Europe/Budapest', '(GMT+0200) Europe/Budapest'), ('Europe/Busingen', '(GMT+0200) Europe/Busingen'), ('Europe/Chisinau', '(GMT+0300) Europe/Chisinau'), ('Europe/Copenhagen', '(GMT+0200) Europe/Copenhagen'), ('Europe/Dublin', '(GMT+0100) Europe/Dublin'), ('Europe/Gibraltar', '(GMT+0200) Europe/Gibraltar'), ('Europe/Guernsey', '(GMT+0100) Europe/Guernsey'), ('Europe/Helsinki', '(GMT+0300) Europe/Helsinki'), ('Europe/Isle_of_Man', '(GMT+0100) Europe/Isle_of_Man'), ('Europe/Istanbul', '(GMT+0300) Europe/Istanbul'), ('Europe/Jersey', '(GMT+0100) Europe/Jersey'), ('Europe/Kaliningrad', '(GMT+0200) Europe/Kaliningrad'), ('Europe/Kiev', '(GMT+0300) Europe/Kiev'), ('Europe/Kirov', '(GMT+0300) Europe/Kirov'), ('Europe/Lisbon', '(GMT+0100) Europe/Lisbon'), ('Europe/Ljubljana', '(GMT+0200) Europe/Ljubljana'), ('Europe/London', '(GMT+0100) Europe/London'), ('Europe/Luxembourg', '(GMT+0200) Europe/Luxembourg'), 
('Europe/Madrid', '(GMT+0200) Europe/Madrid'), ('Europe/Malta', '(GMT+0200) Europe/Malta'), ('Europe/Mariehamn', '(GMT+0300) Europe/Mariehamn'), ('Europe/Minsk', '(GMT+0300) Europe/Minsk'), ('Europe/Monaco', '(GMT+0200) Europe/Monaco'), ('Europe/Moscow', '(GMT+0300) Europe/Moscow'), ('Europe/Oslo', '(GMT+0200) Europe/Oslo'), ('Europe/Paris', '(GMT+0200) Europe/Paris'), ('Europe/Podgorica', '(GMT+0200) Europe/Podgorica'), ('Europe/Prague', '(GMT+0200) Europe/Prague'), ('Europe/Riga', '(GMT+0300) Europe/Riga'), ('Europe/Rome', '(GMT+0200) Europe/Rome'), ('Europe/Samara', '(GMT+0400) Europe/Samara'), ('Europe/San_Marino', '(GMT+0200) Europe/San_Marino'), ('Europe/Sarajevo', '(GMT+0200) Europe/Sarajevo'), ('Europe/Saratov', '(GMT+0400) Europe/Saratov'), ('Europe/Simferopol', '(GMT+0300) Europe/Simferopol'), ('Europe/Skopje', '(GMT+0200) Europe/Skopje'), ('Europe/Sofia', '(GMT+0300) Europe/Sofia'), ('Europe/Stockholm', '(GMT+0200) Europe/Stockholm'), ('Europe/Tallinn', '(GMT+0300) Europe/Tallinn'), ('Europe/Tirane', '(GMT+0200) Europe/Tirane'), ('Europe/Ulyanovsk', '(GMT+0400) Europe/Ulyanovsk'), ('Europe/Uzhgorod', '(GMT+0300) Europe/Uzhgorod'), ('Europe/Vaduz', '(GMT+0200) Europe/Vaduz'), ('Europe/Vatican', '(GMT+0200) Europe/Vatican'), ('Europe/Vienna', '(GMT+0200) Europe/Vienna'), ('Europe/Vilnius', '(GMT+0300) Europe/Vilnius'), ('Europe/Volgograd', '(GMT+0400) Europe/Volgograd'), ('Europe/Warsaw', '(GMT+0200) Europe/Warsaw'), ('Europe/Zagreb', '(GMT+0200) Europe/Zagreb'), ('Europe/Zaporozhye', '(GMT+0300) Europe/Zaporozhye'), ('Europe/Zurich', '(GMT+0200) Europe/Zurich'), ('GMT', '(GMT+0000) GMT'), ('Indian/Antananarivo', '(GMT+0300) Indian/Antananarivo'), ('Indian/Chagos', '(GMT+0600) Indian/Chagos'), ('Indian/Christmas', '(GMT+0700) Indian/Christmas'), ('Indian/Cocos', '(GMT+0630) Indian/Cocos'), ('Indian/Comoro', '(GMT+0300) Indian/Comoro'), ('Indian/Kerguelen', '(GMT+0500) Indian/Kerguelen'), ('Indian/Mahe', '(GMT+0400) Indian/Mahe'), ('Indian/Maldives', '(GMT+0500) Indian/Maldives'), ('Indian/Mauritius', '(GMT+0400) Indian/Mauritius'), ('Indian/Mayotte', '(GMT+0300) Indian/Mayotte'), ('Indian/Reunion', '(GMT+0400) Indian/Reunion'), ('Pacific/Apia', '(GMT+1300) Pacific/Apia'), ('Pacific/Auckland', '(GMT+1200) Pacific/Auckland'), ('Pacific/Bougainville', '(GMT+1100) Pacific/Bougainville'), ('Pacific/Chatham', '(GMT+1245) Pacific/Chatham'), ('Pacific/Chuuk', '(GMT+1000) Pacific/Chuuk'), ('Pacific/Easter', '(GMT-0600) Pacific/Easter'), ('Pacific/Efate', '(GMT+1100) Pacific/Efate'), ('Pacific/Enderbury', '(GMT+1300) Pacific/Enderbury'), ('Pacific/Fakaofo', '(GMT+1300) Pacific/Fakaofo'), ('Pacific/Fiji', '(GMT+1200) Pacific/Fiji'), ('Pacific/Funafuti', '(GMT+1200) Pacific/Funafuti'), ('Pacific/Galapagos', '(GMT-0600) Pacific/Galapagos'), ('Pacific/Gambier', '(GMT-0900) Pacific/Gambier'), ('Pacific/Guadalcanal', '(GMT+1100) Pacific/Guadalcanal'), ('Pacific/Guam', '(GMT+1000) Pacific/Guam'), ('Pacific/Honolulu', '(GMT-1000) Pacific/Honolulu'), ('Pacific/Kiritimati', '(GMT+1400) Pacific/Kiritimati'), ('Pacific/Kosrae', '(GMT+1100) Pacific/Kosrae'), ('Pacific/Kwajalein', '(GMT+1200) Pacific/Kwajalein'), ('Pacific/Majuro', '(GMT+1200) Pacific/Majuro'), ('Pacific/Marquesas', '(GMT-0930) Pacific/Marquesas'), ('Pacific/Midway', '(GMT-1100) Pacific/Midway'), ('Pacific/Nauru', '(GMT+1200) Pacific/Nauru'), ('Pacific/Niue', '(GMT-1100) Pacific/Niue'), ('Pacific/Norfolk', '(GMT+1100) Pacific/Norfolk'), ('Pacific/Noumea', '(GMT+1100) Pacific/Noumea'), ('Pacific/Pago_Pago', '(GMT-1100) 
Pacific/Pago_Pago'), ('Pacific/Palau', '(GMT+0900) Pacific/Palau'), ('Pacific/Pitcairn', '(GMT-0800) Pacific/Pitcairn'), ('Pacific/Pohnpei', '(GMT+1100) Pacific/Pohnpei'), ('Pacific/Port_Moresby', '(GMT+1000) Pacific/Port_Moresby'), ('Pacific/Rarotonga', '(GMT-1000) Pacific/Rarotonga'), ('Pacific/Saipan', '(GMT+1000) Pacific/Saipan'), ('Pacific/Tahiti', '(GMT-1000) Pacific/Tahiti'), ('Pacific/Tarawa', '(GMT+1200) Pacific/Tarawa'), ('Pacific/Tongatapu', '(GMT+1300) Pacific/Tongatapu'), ('Pacific/Wake', '(GMT+1200) Pacific/Wake'), ('Pacific/Wallis', '(GMT+1200) Pacific/Wallis'), ('US/Alaska', '(GMT-0800) US/Alaska'), ('US/Arizona', '(GMT-0700) US/Arizona'), ('US/Central', '(GMT-0500) US/Central'), ('US/Eastern', '(GMT-0400) US/Eastern'), ('US/Hawaii', '(GMT-1000) US/Hawaii'), ('US/Mountain', '(GMT-0600) US/Mountain'), ('US/Pacific', '(GMT-0700) US/Pacific'), ('UTC', '(GMT+0000) UTC')], default='America/New_York', max_length=100), + model_name="profile", + name="timezone", + field=vendor.timezones.fields.TimeZoneField( + choices=[ + ("Africa/Abidjan", "(GMT+0000) Africa/Abidjan"), + ("Africa/Accra", "(GMT+0000) Africa/Accra"), + ("Africa/Addis_Ababa", "(GMT+0300) Africa/Addis_Ababa"), + ("Africa/Algiers", "(GMT+0100) Africa/Algiers"), + ("Africa/Asmara", "(GMT+0300) Africa/Asmara"), + ("Africa/Bamako", "(GMT+0000) Africa/Bamako"), + ("Africa/Bangui", "(GMT+0100) Africa/Bangui"), + ("Africa/Banjul", "(GMT+0000) Africa/Banjul"), + ("Africa/Bissau", "(GMT+0000) Africa/Bissau"), + ("Africa/Blantyre", "(GMT+0200) Africa/Blantyre"), + ("Africa/Brazzaville", "(GMT+0100) Africa/Brazzaville"), + ("Africa/Bujumbura", "(GMT+0200) Africa/Bujumbura"), + ("Africa/Cairo", "(GMT+0200) Africa/Cairo"), + ("Africa/Casablanca", "(GMT+0100) Africa/Casablanca"), + ("Africa/Ceuta", "(GMT+0200) Africa/Ceuta"), + ("Africa/Conakry", "(GMT+0000) Africa/Conakry"), + ("Africa/Dakar", "(GMT+0000) Africa/Dakar"), + ("Africa/Dar_es_Salaam", "(GMT+0300) Africa/Dar_es_Salaam"), + ("Africa/Djibouti", "(GMT+0300) Africa/Djibouti"), + ("Africa/Douala", "(GMT+0100) Africa/Douala"), + ("Africa/El_Aaiun", "(GMT+0100) Africa/El_Aaiun"), + ("Africa/Freetown", "(GMT+0000) Africa/Freetown"), + ("Africa/Gaborone", "(GMT+0200) Africa/Gaborone"), + ("Africa/Harare", "(GMT+0200) Africa/Harare"), + ("Africa/Johannesburg", "(GMT+0200) Africa/Johannesburg"), + ("Africa/Juba", "(GMT+0300) Africa/Juba"), + ("Africa/Kampala", "(GMT+0300) Africa/Kampala"), + ("Africa/Khartoum", "(GMT+0200) Africa/Khartoum"), + ("Africa/Kigali", "(GMT+0200) Africa/Kigali"), + ("Africa/Kinshasa", "(GMT+0100) Africa/Kinshasa"), + ("Africa/Lagos", "(GMT+0100) Africa/Lagos"), + ("Africa/Libreville", "(GMT+0100) Africa/Libreville"), + ("Africa/Lome", "(GMT+0000) Africa/Lome"), + ("Africa/Luanda", "(GMT+0100) Africa/Luanda"), + ("Africa/Lubumbashi", "(GMT+0200) Africa/Lubumbashi"), + ("Africa/Lusaka", "(GMT+0200) Africa/Lusaka"), + ("Africa/Malabo", "(GMT+0100) Africa/Malabo"), + ("Africa/Maputo", "(GMT+0200) Africa/Maputo"), + ("Africa/Maseru", "(GMT+0200) Africa/Maseru"), + ("Africa/Mbabane", "(GMT+0200) Africa/Mbabane"), + ("Africa/Mogadishu", "(GMT+0300) Africa/Mogadishu"), + ("Africa/Monrovia", "(GMT+0000) Africa/Monrovia"), + ("Africa/Nairobi", "(GMT+0300) Africa/Nairobi"), + ("Africa/Ndjamena", "(GMT+0100) Africa/Ndjamena"), + ("Africa/Niamey", "(GMT+0100) Africa/Niamey"), + ("Africa/Nouakchott", "(GMT+0000) Africa/Nouakchott"), + ("Africa/Ouagadougou", "(GMT+0000) Africa/Ouagadougou"), + ("Africa/Porto-Novo", "(GMT+0100) Africa/Porto-Novo"), + 
("Africa/Sao_Tome", "(GMT+0000) Africa/Sao_Tome"), + ("Africa/Tripoli", "(GMT+0200) Africa/Tripoli"), + ("Africa/Tunis", "(GMT+0100) Africa/Tunis"), + ("Africa/Windhoek", "(GMT+0200) Africa/Windhoek"), + ("America/Adak", "(GMT-0900) America/Adak"), + ("America/Anchorage", "(GMT-0800) America/Anchorage"), + ("America/Anguilla", "(GMT-0400) America/Anguilla"), + ("America/Antigua", "(GMT-0400) America/Antigua"), + ("America/Araguaina", "(GMT-0300) America/Araguaina"), + ("America/Argentina/Buenos_Aires", "(GMT-0300) America/Argentina/Buenos_Aires"), + ("America/Argentina/Catamarca", "(GMT-0300) America/Argentina/Catamarca"), + ("America/Argentina/Cordoba", "(GMT-0300) America/Argentina/Cordoba"), + ("America/Argentina/Jujuy", "(GMT-0300) America/Argentina/Jujuy"), + ("America/Argentina/La_Rioja", "(GMT-0300) America/Argentina/La_Rioja"), + ("America/Argentina/Mendoza", "(GMT-0300) America/Argentina/Mendoza"), + ("America/Argentina/Rio_Gallegos", "(GMT-0300) America/Argentina/Rio_Gallegos"), + ("America/Argentina/Salta", "(GMT-0300) America/Argentina/Salta"), + ("America/Argentina/San_Juan", "(GMT-0300) America/Argentina/San_Juan"), + ("America/Argentina/San_Luis", "(GMT-0300) America/Argentina/San_Luis"), + ("America/Argentina/Tucuman", "(GMT-0300) America/Argentina/Tucuman"), + ("America/Argentina/Ushuaia", "(GMT-0300) America/Argentina/Ushuaia"), + ("America/Aruba", "(GMT-0400) America/Aruba"), + ("America/Asuncion", "(GMT-0400) America/Asuncion"), + ("America/Atikokan", "(GMT-0500) America/Atikokan"), + ("America/Bahia", "(GMT-0300) America/Bahia"), + ("America/Bahia_Banderas", "(GMT-0500) America/Bahia_Banderas"), + ("America/Barbados", "(GMT-0400) America/Barbados"), + ("America/Belem", "(GMT-0300) America/Belem"), + ("America/Belize", "(GMT-0600) America/Belize"), + ("America/Blanc-Sablon", "(GMT-0400) America/Blanc-Sablon"), + ("America/Boa_Vista", "(GMT-0400) America/Boa_Vista"), + ("America/Bogota", "(GMT-0500) America/Bogota"), + ("America/Boise", "(GMT-0600) America/Boise"), + ("America/Cambridge_Bay", "(GMT-0600) America/Cambridge_Bay"), + ("America/Campo_Grande", "(GMT-0400) America/Campo_Grande"), + ("America/Cancun", "(GMT-0500) America/Cancun"), + ("America/Caracas", "(GMT-0400) America/Caracas"), + ("America/Cayenne", "(GMT-0300) America/Cayenne"), + ("America/Cayman", "(GMT-0500) America/Cayman"), + ("America/Chicago", "(GMT-0500) America/Chicago"), + ("America/Chihuahua", "(GMT-0600) America/Chihuahua"), + ("America/Costa_Rica", "(GMT-0600) America/Costa_Rica"), + ("America/Creston", "(GMT-0700) America/Creston"), + ("America/Cuiaba", "(GMT-0400) America/Cuiaba"), + ("America/Curacao", "(GMT-0400) America/Curacao"), + ("America/Danmarkshavn", "(GMT+0000) America/Danmarkshavn"), + ("America/Dawson", "(GMT-0700) America/Dawson"), + ("America/Dawson_Creek", "(GMT-0700) America/Dawson_Creek"), + ("America/Denver", "(GMT-0600) America/Denver"), + ("America/Detroit", "(GMT-0400) America/Detroit"), + ("America/Dominica", "(GMT-0400) America/Dominica"), + ("America/Edmonton", "(GMT-0600) America/Edmonton"), + ("America/Eirunepe", "(GMT-0500) America/Eirunepe"), + ("America/El_Salvador", "(GMT-0600) America/El_Salvador"), + ("America/Fort_Nelson", "(GMT-0700) America/Fort_Nelson"), + ("America/Fortaleza", "(GMT-0300) America/Fortaleza"), + ("America/Glace_Bay", "(GMT-0300) America/Glace_Bay"), + ("America/Goose_Bay", "(GMT-0300) America/Goose_Bay"), + ("America/Grand_Turk", "(GMT-0400) America/Grand_Turk"), + ("America/Grenada", "(GMT-0400) America/Grenada"), + 
("America/Guadeloupe", "(GMT-0400) America/Guadeloupe"), + ("America/Guatemala", "(GMT-0600) America/Guatemala"), + ("America/Guayaquil", "(GMT-0500) America/Guayaquil"), + ("America/Guyana", "(GMT-0400) America/Guyana"), + ("America/Halifax", "(GMT-0300) America/Halifax"), + ("America/Havana", "(GMT-0400) America/Havana"), + ("America/Hermosillo", "(GMT-0700) America/Hermosillo"), + ("America/Indiana/Indianapolis", "(GMT-0400) America/Indiana/Indianapolis"), + ("America/Indiana/Knox", "(GMT-0500) America/Indiana/Knox"), + ("America/Indiana/Marengo", "(GMT-0400) America/Indiana/Marengo"), + ("America/Indiana/Petersburg", "(GMT-0400) America/Indiana/Petersburg"), + ("America/Indiana/Tell_City", "(GMT-0500) America/Indiana/Tell_City"), + ("America/Indiana/Vevay", "(GMT-0400) America/Indiana/Vevay"), + ("America/Indiana/Vincennes", "(GMT-0400) America/Indiana/Vincennes"), + ("America/Indiana/Winamac", "(GMT-0400) America/Indiana/Winamac"), + ("America/Inuvik", "(GMT-0600) America/Inuvik"), + ("America/Iqaluit", "(GMT-0400) America/Iqaluit"), + ("America/Jamaica", "(GMT-0500) America/Jamaica"), + ("America/Juneau", "(GMT-0800) America/Juneau"), + ("America/Kentucky/Louisville", "(GMT-0400) America/Kentucky/Louisville"), + ("America/Kentucky/Monticello", "(GMT-0400) America/Kentucky/Monticello"), + ("America/Kralendijk", "(GMT-0400) America/Kralendijk"), + ("America/La_Paz", "(GMT-0400) America/La_Paz"), + ("America/Lima", "(GMT-0500) America/Lima"), + ("America/Los_Angeles", "(GMT-0700) America/Los_Angeles"), + ("America/Lower_Princes", "(GMT-0400) America/Lower_Princes"), + ("America/Maceio", "(GMT-0300) America/Maceio"), + ("America/Managua", "(GMT-0600) America/Managua"), + ("America/Manaus", "(GMT-0400) America/Manaus"), + ("America/Marigot", "(GMT-0400) America/Marigot"), + ("America/Martinique", "(GMT-0400) America/Martinique"), + ("America/Matamoros", "(GMT-0500) America/Matamoros"), + ("America/Mazatlan", "(GMT-0600) America/Mazatlan"), + ("America/Menominee", "(GMT-0500) America/Menominee"), + ("America/Merida", "(GMT-0500) America/Merida"), + ("America/Metlakatla", "(GMT-0800) America/Metlakatla"), + ("America/Mexico_City", "(GMT-0500) America/Mexico_City"), + ("America/Miquelon", "(GMT-0200) America/Miquelon"), + ("America/Moncton", "(GMT-0300) America/Moncton"), + ("America/Monterrey", "(GMT-0500) America/Monterrey"), + ("America/Montevideo", "(GMT-0300) America/Montevideo"), + ("America/Montserrat", "(GMT-0400) America/Montserrat"), + ("America/Nassau", "(GMT-0400) America/Nassau"), + ("America/New_York", "(GMT-0400) America/New_York"), + ("America/Nipigon", "(GMT-0400) America/Nipigon"), + ("America/Nome", "(GMT-0800) America/Nome"), + ("America/Noronha", "(GMT-0200) America/Noronha"), + ("America/North_Dakota/Beulah", "(GMT-0500) America/North_Dakota/Beulah"), + ("America/North_Dakota/Center", "(GMT-0500) America/North_Dakota/Center"), + ("America/North_Dakota/New_Salem", "(GMT-0500) America/North_Dakota/New_Salem"), + ("America/Nuuk", "(GMT-0200) America/Nuuk"), + ("America/Ojinaga", "(GMT-0600) America/Ojinaga"), + ("America/Panama", "(GMT-0500) America/Panama"), + ("America/Pangnirtung", "(GMT-0400) America/Pangnirtung"), + ("America/Paramaribo", "(GMT-0300) America/Paramaribo"), + ("America/Phoenix", "(GMT-0700) America/Phoenix"), + ("America/Port-au-Prince", "(GMT-0400) America/Port-au-Prince"), + ("America/Port_of_Spain", "(GMT-0400) America/Port_of_Spain"), + ("America/Porto_Velho", "(GMT-0400) America/Porto_Velho"), + ("America/Puerto_Rico", "(GMT-0400) 
America/Puerto_Rico"), + ("America/Punta_Arenas", "(GMT-0300) America/Punta_Arenas"), + ("America/Rainy_River", "(GMT-0500) America/Rainy_River"), + ("America/Rankin_Inlet", "(GMT-0500) America/Rankin_Inlet"), + ("America/Recife", "(GMT-0300) America/Recife"), + ("America/Regina", "(GMT-0600) America/Regina"), + ("America/Resolute", "(GMT-0500) America/Resolute"), + ("America/Rio_Branco", "(GMT-0500) America/Rio_Branco"), + ("America/Santarem", "(GMT-0300) America/Santarem"), + ("America/Santiago", "(GMT-0400) America/Santiago"), + ("America/Santo_Domingo", "(GMT-0400) America/Santo_Domingo"), + ("America/Sao_Paulo", "(GMT-0300) America/Sao_Paulo"), + ("America/Scoresbysund", "(GMT+0000) America/Scoresbysund"), + ("America/Sitka", "(GMT-0800) America/Sitka"), + ("America/St_Barthelemy", "(GMT-0400) America/St_Barthelemy"), + ("America/St_Johns", "(GMT-0230) America/St_Johns"), + ("America/St_Kitts", "(GMT-0400) America/St_Kitts"), + ("America/St_Lucia", "(GMT-0400) America/St_Lucia"), + ("America/St_Thomas", "(GMT-0400) America/St_Thomas"), + ("America/St_Vincent", "(GMT-0400) America/St_Vincent"), + ("America/Swift_Current", "(GMT-0600) America/Swift_Current"), + ("America/Tegucigalpa", "(GMT-0600) America/Tegucigalpa"), + ("America/Thule", "(GMT-0300) America/Thule"), + ("America/Thunder_Bay", "(GMT-0400) America/Thunder_Bay"), + ("America/Tijuana", "(GMT-0700) America/Tijuana"), + ("America/Toronto", "(GMT-0400) America/Toronto"), + ("America/Tortola", "(GMT-0400) America/Tortola"), + ("America/Vancouver", "(GMT-0700) America/Vancouver"), + ("America/Whitehorse", "(GMT-0700) America/Whitehorse"), + ("America/Winnipeg", "(GMT-0500) America/Winnipeg"), + ("America/Yakutat", "(GMT-0800) America/Yakutat"), + ("America/Yellowknife", "(GMT-0600) America/Yellowknife"), + ("Antarctica/Casey", "(GMT+1100) Antarctica/Casey"), + ("Antarctica/Davis", "(GMT+0700) Antarctica/Davis"), + ("Antarctica/DumontDUrville", "(GMT+1000) Antarctica/DumontDUrville"), + ("Antarctica/Macquarie", "(GMT+1000) Antarctica/Macquarie"), + ("Antarctica/Mawson", "(GMT+0500) Antarctica/Mawson"), + ("Antarctica/McMurdo", "(GMT+1200) Antarctica/McMurdo"), + ("Antarctica/Palmer", "(GMT-0300) Antarctica/Palmer"), + ("Antarctica/Rothera", "(GMT-0300) Antarctica/Rothera"), + ("Antarctica/Syowa", "(GMT+0300) Antarctica/Syowa"), + ("Antarctica/Troll", "(GMT+0200) Antarctica/Troll"), + ("Antarctica/Vostok", "(GMT+0600) Antarctica/Vostok"), + ("Arctic/Longyearbyen", "(GMT+0200) Arctic/Longyearbyen"), + ("Asia/Aden", "(GMT+0300) Asia/Aden"), + ("Asia/Almaty", "(GMT+0600) Asia/Almaty"), + ("Asia/Amman", "(GMT+0300) Asia/Amman"), + ("Asia/Anadyr", "(GMT+1200) Asia/Anadyr"), + ("Asia/Aqtau", "(GMT+0500) Asia/Aqtau"), + ("Asia/Aqtobe", "(GMT+0500) Asia/Aqtobe"), + ("Asia/Ashgabat", "(GMT+0500) Asia/Ashgabat"), + ("Asia/Atyrau", "(GMT+0500) Asia/Atyrau"), + ("Asia/Baghdad", "(GMT+0300) Asia/Baghdad"), + ("Asia/Bahrain", "(GMT+0300) Asia/Bahrain"), + ("Asia/Baku", "(GMT+0400) Asia/Baku"), + ("Asia/Bangkok", "(GMT+0700) Asia/Bangkok"), + ("Asia/Barnaul", "(GMT+0700) Asia/Barnaul"), + ("Asia/Beirut", "(GMT+0300) Asia/Beirut"), + ("Asia/Bishkek", "(GMT+0600) Asia/Bishkek"), + ("Asia/Brunei", "(GMT+0800) Asia/Brunei"), + ("Asia/Chita", "(GMT+0900) Asia/Chita"), + ("Asia/Choibalsan", "(GMT+0800) Asia/Choibalsan"), + ("Asia/Colombo", "(GMT+0530) Asia/Colombo"), + ("Asia/Damascus", "(GMT+0300) Asia/Damascus"), + ("Asia/Dhaka", "(GMT+0600) Asia/Dhaka"), + ("Asia/Dili", "(GMT+0900) Asia/Dili"), + ("Asia/Dubai", "(GMT+0400) Asia/Dubai"), + 
("Asia/Dushanbe", "(GMT+0500) Asia/Dushanbe"), + ("Asia/Famagusta", "(GMT+0300) Asia/Famagusta"), + ("Asia/Gaza", "(GMT+0300) Asia/Gaza"), + ("Asia/Hebron", "(GMT+0300) Asia/Hebron"), + ("Asia/Ho_Chi_Minh", "(GMT+0700) Asia/Ho_Chi_Minh"), + ("Asia/Hong_Kong", "(GMT+0800) Asia/Hong_Kong"), + ("Asia/Hovd", "(GMT+0700) Asia/Hovd"), + ("Asia/Irkutsk", "(GMT+0800) Asia/Irkutsk"), + ("Asia/Jakarta", "(GMT+0700) Asia/Jakarta"), + ("Asia/Jayapura", "(GMT+0900) Asia/Jayapura"), + ("Asia/Jerusalem", "(GMT+0300) Asia/Jerusalem"), + ("Asia/Kabul", "(GMT+0430) Asia/Kabul"), + ("Asia/Kamchatka", "(GMT+1200) Asia/Kamchatka"), + ("Asia/Karachi", "(GMT+0500) Asia/Karachi"), + ("Asia/Kathmandu", "(GMT+0545) Asia/Kathmandu"), + ("Asia/Khandyga", "(GMT+0900) Asia/Khandyga"), + ("Asia/Kolkata", "(GMT+0530) Asia/Kolkata"), + ("Asia/Krasnoyarsk", "(GMT+0700) Asia/Krasnoyarsk"), + ("Asia/Kuala_Lumpur", "(GMT+0800) Asia/Kuala_Lumpur"), + ("Asia/Kuching", "(GMT+0800) Asia/Kuching"), + ("Asia/Kuwait", "(GMT+0300) Asia/Kuwait"), + ("Asia/Macau", "(GMT+0800) Asia/Macau"), + ("Asia/Magadan", "(GMT+1100) Asia/Magadan"), + ("Asia/Makassar", "(GMT+0800) Asia/Makassar"), + ("Asia/Manila", "(GMT+0800) Asia/Manila"), + ("Asia/Muscat", "(GMT+0400) Asia/Muscat"), + ("Asia/Nicosia", "(GMT+0300) Asia/Nicosia"), + ("Asia/Novokuznetsk", "(GMT+0700) Asia/Novokuznetsk"), + ("Asia/Novosibirsk", "(GMT+0700) Asia/Novosibirsk"), + ("Asia/Omsk", "(GMT+0600) Asia/Omsk"), + ("Asia/Oral", "(GMT+0500) Asia/Oral"), + ("Asia/Phnom_Penh", "(GMT+0700) Asia/Phnom_Penh"), + ("Asia/Pontianak", "(GMT+0700) Asia/Pontianak"), + ("Asia/Pyongyang", "(GMT+0900) Asia/Pyongyang"), + ("Asia/Qatar", "(GMT+0300) Asia/Qatar"), + ("Asia/Qostanay", "(GMT+0600) Asia/Qostanay"), + ("Asia/Qyzylorda", "(GMT+0500) Asia/Qyzylorda"), + ("Asia/Riyadh", "(GMT+0300) Asia/Riyadh"), + ("Asia/Sakhalin", "(GMT+1100) Asia/Sakhalin"), + ("Asia/Samarkand", "(GMT+0500) Asia/Samarkand"), + ("Asia/Seoul", "(GMT+0900) Asia/Seoul"), + ("Asia/Shanghai", "(GMT+0800) Asia/Shanghai"), + ("Asia/Singapore", "(GMT+0800) Asia/Singapore"), + ("Asia/Srednekolymsk", "(GMT+1100) Asia/Srednekolymsk"), + ("Asia/Taipei", "(GMT+0800) Asia/Taipei"), + ("Asia/Tashkent", "(GMT+0500) Asia/Tashkent"), + ("Asia/Tbilisi", "(GMT+0400) Asia/Tbilisi"), + ("Asia/Tehran", "(GMT+0430) Asia/Tehran"), + ("Asia/Thimphu", "(GMT+0600) Asia/Thimphu"), + ("Asia/Tokyo", "(GMT+0900) Asia/Tokyo"), + ("Asia/Tomsk", "(GMT+0700) Asia/Tomsk"), + ("Asia/Ulaanbaatar", "(GMT+0800) Asia/Ulaanbaatar"), + ("Asia/Urumqi", "(GMT+0600) Asia/Urumqi"), + ("Asia/Ust-Nera", "(GMT+1000) Asia/Ust-Nera"), + ("Asia/Vientiane", "(GMT+0700) Asia/Vientiane"), + ("Asia/Vladivostok", "(GMT+1000) Asia/Vladivostok"), + ("Asia/Yakutsk", "(GMT+0900) Asia/Yakutsk"), + ("Asia/Yangon", "(GMT+0630) Asia/Yangon"), + ("Asia/Yekaterinburg", "(GMT+0500) Asia/Yekaterinburg"), + ("Asia/Yerevan", "(GMT+0400) Asia/Yerevan"), + ("Atlantic/Azores", "(GMT+0000) Atlantic/Azores"), + ("Atlantic/Bermuda", "(GMT-0300) Atlantic/Bermuda"), + ("Atlantic/Canary", "(GMT+0100) Atlantic/Canary"), + ("Atlantic/Cape_Verde", "(GMT-0100) Atlantic/Cape_Verde"), + ("Atlantic/Faroe", "(GMT+0100) Atlantic/Faroe"), + ("Atlantic/Madeira", "(GMT+0100) Atlantic/Madeira"), + ("Atlantic/Reykjavik", "(GMT+0000) Atlantic/Reykjavik"), + ("Atlantic/South_Georgia", "(GMT-0200) Atlantic/South_Georgia"), + ("Atlantic/St_Helena", "(GMT+0000) Atlantic/St_Helena"), + ("Atlantic/Stanley", "(GMT-0300) Atlantic/Stanley"), + ("Australia/Adelaide", "(GMT+0930) Australia/Adelaide"), + 
("Australia/Brisbane", "(GMT+1000) Australia/Brisbane"), + ("Australia/Broken_Hill", "(GMT+0930) Australia/Broken_Hill"), + ("Australia/Currie", "(GMT+1000) Australia/Currie"), + ("Australia/Darwin", "(GMT+0930) Australia/Darwin"), + ("Australia/Eucla", "(GMT+0845) Australia/Eucla"), + ("Australia/Hobart", "(GMT+1000) Australia/Hobart"), + ("Australia/Lindeman", "(GMT+1000) Australia/Lindeman"), + ("Australia/Lord_Howe", "(GMT+1030) Australia/Lord_Howe"), + ("Australia/Melbourne", "(GMT+1000) Australia/Melbourne"), + ("Australia/Perth", "(GMT+0800) Australia/Perth"), + ("Australia/Sydney", "(GMT+1000) Australia/Sydney"), + ("Canada/Atlantic", "(GMT-0300) Canada/Atlantic"), + ("Canada/Central", "(GMT-0500) Canada/Central"), + ("Canada/Eastern", "(GMT-0400) Canada/Eastern"), + ("Canada/Mountain", "(GMT-0600) Canada/Mountain"), + ("Canada/Newfoundland", "(GMT-0230) Canada/Newfoundland"), + ("Canada/Pacific", "(GMT-0700) Canada/Pacific"), + ("Europe/Amsterdam", "(GMT+0200) Europe/Amsterdam"), + ("Europe/Andorra", "(GMT+0200) Europe/Andorra"), + ("Europe/Astrakhan", "(GMT+0400) Europe/Astrakhan"), + ("Europe/Athens", "(GMT+0300) Europe/Athens"), + ("Europe/Belgrade", "(GMT+0200) Europe/Belgrade"), + ("Europe/Berlin", "(GMT+0200) Europe/Berlin"), + ("Europe/Bratislava", "(GMT+0200) Europe/Bratislava"), + ("Europe/Brussels", "(GMT+0200) Europe/Brussels"), + ("Europe/Bucharest", "(GMT+0300) Europe/Bucharest"), + ("Europe/Budapest", "(GMT+0200) Europe/Budapest"), + ("Europe/Busingen", "(GMT+0200) Europe/Busingen"), + ("Europe/Chisinau", "(GMT+0300) Europe/Chisinau"), + ("Europe/Copenhagen", "(GMT+0200) Europe/Copenhagen"), + ("Europe/Dublin", "(GMT+0100) Europe/Dublin"), + ("Europe/Gibraltar", "(GMT+0200) Europe/Gibraltar"), + ("Europe/Guernsey", "(GMT+0100) Europe/Guernsey"), + ("Europe/Helsinki", "(GMT+0300) Europe/Helsinki"), + ("Europe/Isle_of_Man", "(GMT+0100) Europe/Isle_of_Man"), + ("Europe/Istanbul", "(GMT+0300) Europe/Istanbul"), + ("Europe/Jersey", "(GMT+0100) Europe/Jersey"), + ("Europe/Kaliningrad", "(GMT+0200) Europe/Kaliningrad"), + ("Europe/Kiev", "(GMT+0300) Europe/Kiev"), + ("Europe/Kirov", "(GMT+0300) Europe/Kirov"), + ("Europe/Lisbon", "(GMT+0100) Europe/Lisbon"), + ("Europe/Ljubljana", "(GMT+0200) Europe/Ljubljana"), + ("Europe/London", "(GMT+0100) Europe/London"), + ("Europe/Luxembourg", "(GMT+0200) Europe/Luxembourg"), + ("Europe/Madrid", "(GMT+0200) Europe/Madrid"), + ("Europe/Malta", "(GMT+0200) Europe/Malta"), + ("Europe/Mariehamn", "(GMT+0300) Europe/Mariehamn"), + ("Europe/Minsk", "(GMT+0300) Europe/Minsk"), + ("Europe/Monaco", "(GMT+0200) Europe/Monaco"), + ("Europe/Moscow", "(GMT+0300) Europe/Moscow"), + ("Europe/Oslo", "(GMT+0200) Europe/Oslo"), + ("Europe/Paris", "(GMT+0200) Europe/Paris"), + ("Europe/Podgorica", "(GMT+0200) Europe/Podgorica"), + ("Europe/Prague", "(GMT+0200) Europe/Prague"), + ("Europe/Riga", "(GMT+0300) Europe/Riga"), + ("Europe/Rome", "(GMT+0200) Europe/Rome"), + ("Europe/Samara", "(GMT+0400) Europe/Samara"), + ("Europe/San_Marino", "(GMT+0200) Europe/San_Marino"), + ("Europe/Sarajevo", "(GMT+0200) Europe/Sarajevo"), + ("Europe/Saratov", "(GMT+0400) Europe/Saratov"), + ("Europe/Simferopol", "(GMT+0300) Europe/Simferopol"), + ("Europe/Skopje", "(GMT+0200) Europe/Skopje"), + ("Europe/Sofia", "(GMT+0300) Europe/Sofia"), + ("Europe/Stockholm", "(GMT+0200) Europe/Stockholm"), + ("Europe/Tallinn", "(GMT+0300) Europe/Tallinn"), + ("Europe/Tirane", "(GMT+0200) Europe/Tirane"), + ("Europe/Ulyanovsk", "(GMT+0400) Europe/Ulyanovsk"), + ("Europe/Uzhgorod", 
"(GMT+0300) Europe/Uzhgorod"), + ("Europe/Vaduz", "(GMT+0200) Europe/Vaduz"), + ("Europe/Vatican", "(GMT+0200) Europe/Vatican"), + ("Europe/Vienna", "(GMT+0200) Europe/Vienna"), + ("Europe/Vilnius", "(GMT+0300) Europe/Vilnius"), + ("Europe/Volgograd", "(GMT+0400) Europe/Volgograd"), + ("Europe/Warsaw", "(GMT+0200) Europe/Warsaw"), + ("Europe/Zagreb", "(GMT+0200) Europe/Zagreb"), + ("Europe/Zaporozhye", "(GMT+0300) Europe/Zaporozhye"), + ("Europe/Zurich", "(GMT+0200) Europe/Zurich"), + ("GMT", "(GMT+0000) GMT"), + ("Indian/Antananarivo", "(GMT+0300) Indian/Antananarivo"), + ("Indian/Chagos", "(GMT+0600) Indian/Chagos"), + ("Indian/Christmas", "(GMT+0700) Indian/Christmas"), + ("Indian/Cocos", "(GMT+0630) Indian/Cocos"), + ("Indian/Comoro", "(GMT+0300) Indian/Comoro"), + ("Indian/Kerguelen", "(GMT+0500) Indian/Kerguelen"), + ("Indian/Mahe", "(GMT+0400) Indian/Mahe"), + ("Indian/Maldives", "(GMT+0500) Indian/Maldives"), + ("Indian/Mauritius", "(GMT+0400) Indian/Mauritius"), + ("Indian/Mayotte", "(GMT+0300) Indian/Mayotte"), + ("Indian/Reunion", "(GMT+0400) Indian/Reunion"), + ("Pacific/Apia", "(GMT+1300) Pacific/Apia"), + ("Pacific/Auckland", "(GMT+1200) Pacific/Auckland"), + ("Pacific/Bougainville", "(GMT+1100) Pacific/Bougainville"), + ("Pacific/Chatham", "(GMT+1245) Pacific/Chatham"), + ("Pacific/Chuuk", "(GMT+1000) Pacific/Chuuk"), + ("Pacific/Easter", "(GMT-0600) Pacific/Easter"), + ("Pacific/Efate", "(GMT+1100) Pacific/Efate"), + ("Pacific/Enderbury", "(GMT+1300) Pacific/Enderbury"), + ("Pacific/Fakaofo", "(GMT+1300) Pacific/Fakaofo"), + ("Pacific/Fiji", "(GMT+1200) Pacific/Fiji"), + ("Pacific/Funafuti", "(GMT+1200) Pacific/Funafuti"), + ("Pacific/Galapagos", "(GMT-0600) Pacific/Galapagos"), + ("Pacific/Gambier", "(GMT-0900) Pacific/Gambier"), + ("Pacific/Guadalcanal", "(GMT+1100) Pacific/Guadalcanal"), + ("Pacific/Guam", "(GMT+1000) Pacific/Guam"), + ("Pacific/Honolulu", "(GMT-1000) Pacific/Honolulu"), + ("Pacific/Kiritimati", "(GMT+1400) Pacific/Kiritimati"), + ("Pacific/Kosrae", "(GMT+1100) Pacific/Kosrae"), + ("Pacific/Kwajalein", "(GMT+1200) Pacific/Kwajalein"), + ("Pacific/Majuro", "(GMT+1200) Pacific/Majuro"), + ("Pacific/Marquesas", "(GMT-0930) Pacific/Marquesas"), + ("Pacific/Midway", "(GMT-1100) Pacific/Midway"), + ("Pacific/Nauru", "(GMT+1200) Pacific/Nauru"), + ("Pacific/Niue", "(GMT-1100) Pacific/Niue"), + ("Pacific/Norfolk", "(GMT+1100) Pacific/Norfolk"), + ("Pacific/Noumea", "(GMT+1100) Pacific/Noumea"), + ("Pacific/Pago_Pago", "(GMT-1100) Pacific/Pago_Pago"), + ("Pacific/Palau", "(GMT+0900) Pacific/Palau"), + ("Pacific/Pitcairn", "(GMT-0800) Pacific/Pitcairn"), + ("Pacific/Pohnpei", "(GMT+1100) Pacific/Pohnpei"), + ("Pacific/Port_Moresby", "(GMT+1000) Pacific/Port_Moresby"), + ("Pacific/Rarotonga", "(GMT-1000) Pacific/Rarotonga"), + ("Pacific/Saipan", "(GMT+1000) Pacific/Saipan"), + ("Pacific/Tahiti", "(GMT-1000) Pacific/Tahiti"), + ("Pacific/Tarawa", "(GMT+1200) Pacific/Tarawa"), + ("Pacific/Tongatapu", "(GMT+1300) Pacific/Tongatapu"), + ("Pacific/Wake", "(GMT+1200) Pacific/Wake"), + ("Pacific/Wallis", "(GMT+1200) Pacific/Wallis"), + ("US/Alaska", "(GMT-0800) US/Alaska"), + ("US/Arizona", "(GMT-0700) US/Arizona"), + ("US/Central", "(GMT-0500) US/Central"), + ("US/Eastern", "(GMT-0400) US/Eastern"), + ("US/Hawaii", "(GMT-1000) US/Hawaii"), + ("US/Mountain", "(GMT-0600) US/Mountain"), + ("US/Pacific", "(GMT-0700) US/Pacific"), + ("UTC", "(GMT+0000) UTC"), + ], + default="America/New_York", + max_length=100, + ), ), ] diff --git a/apps/profile/models.py 
b/apps/profile/models.py index 2482c01660..3536190387 100644 --- a/apps/profile/models.py +++ b/apps/profile/models.py @@ -41,44 +41,45 @@ from zebra.signals import zebra_webhook_charge_refunded from zebra.signals import zebra_webhook_checkout_session_completed + class Profile(models.Model): - user = models.OneToOneField(User, unique=True, related_name="profile", on_delete=models.CASCADE) - is_premium = models.BooleanField(default=False) - is_archive = models.BooleanField(default=False, blank=True, null=True) - is_pro = models.BooleanField(default=False, blank=True, null=True) - premium_expire = models.DateTimeField(blank=True, null=True) - send_emails = models.BooleanField(default=True) - preferences = models.TextField(default="{}") - view_settings = models.TextField(default="{}") + user = models.OneToOneField(User, unique=True, related_name="profile", on_delete=models.CASCADE) + is_premium = models.BooleanField(default=False) + is_archive = models.BooleanField(default=False, blank=True, null=True) + is_pro = models.BooleanField(default=False, blank=True, null=True) + premium_expire = models.DateTimeField(blank=True, null=True) + send_emails = models.BooleanField(default=True) + preferences = models.TextField(default="{}") + view_settings = models.TextField(default="{}") collapsed_folders = models.TextField(default="[]") - feed_pane_size = models.IntegerField(default=282) - days_of_unread = models.IntegerField(default=settings.DAYS_OF_UNREAD, blank=True, null=True) + feed_pane_size = models.IntegerField(default=282) + days_of_unread = models.IntegerField(default=settings.DAYS_OF_UNREAD, blank=True, null=True) tutorial_finished = models.BooleanField(default=False) hide_getting_started = models.BooleanField(default=False, null=True, blank=True) - has_setup_feeds = models.BooleanField(default=False, null=True, blank=True) + has_setup_feeds = models.BooleanField(default=False, null=True, blank=True) has_found_friends = models.BooleanField(default=False, null=True, blank=True) has_trained_intelligence = models.BooleanField(default=False, null=True, blank=True) - last_seen_on = models.DateTimeField(default=datetime.datetime.now) - last_seen_ip = models.CharField(max_length=50, blank=True, null=True) - dashboard_date = models.DateTimeField(default=datetime.datetime.now) - timezone = TimeZoneField(default="America/New_York") - secret_token = models.CharField(max_length=12, blank=True, null=True) - stripe_4_digits = models.CharField(max_length=4, blank=True, null=True) - stripe_id = models.CharField(max_length=24, blank=True, null=True) - paypal_sub_id = models.CharField(max_length=24, blank=True, null=True) + last_seen_on = models.DateTimeField(default=datetime.datetime.now) + last_seen_ip = models.CharField(max_length=50, blank=True, null=True) + dashboard_date = models.DateTimeField(default=datetime.datetime.now) + timezone = TimeZoneField(default="America/New_York") + secret_token = models.CharField(max_length=12, blank=True, null=True) + stripe_4_digits = models.CharField(max_length=4, blank=True, null=True) + stripe_id = models.CharField(max_length=24, blank=True, null=True) + paypal_sub_id = models.CharField(max_length=24, blank=True, null=True) # paypal_payer_id = models.CharField(max_length=24, blank=True, null=True) - premium_renewal = models.BooleanField(default=False, blank=True, null=True) - active_provider = models.CharField(max_length=24, blank=True, null=True) - + premium_renewal = models.BooleanField(default=False, blank=True, null=True) + active_provider = 
models.CharField(max_length=24, blank=True, null=True) + def __str__(self): return "%s <%s>%s%s%s" % ( - self.user, - self.user.email, - " (Premium)" if self.is_premium and not self.is_archive and not self.is_pro else "", + self.user, + self.user.email, + " (Premium)" if self.is_premium and not self.is_archive and not self.is_pro else "", " (Premium ARCHIVE)" if self.is_archive and not self.is_pro else "", " (Premium PRO)" if self.is_pro else "", ) - + @classmethod def plan_to_stripe_price(cls, plan): price = None @@ -93,7 +94,7 @@ def plan_to_stripe_price(cls, plan): if settings.DEBUG: price = "price_0KK5twwdsmP8XBlasifbX56Z" return price - + @classmethod def plan_to_paypal_plan_id(cls, plan): price = None @@ -118,13 +119,13 @@ def unread_cutoff(self, force_premium=False, force_archive=False): return datetime.datetime.utcnow() - datetime.timedelta(days=days_of_unread) if self.is_premium or force_premium: return datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD) - + return datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD_FREE) @property def unread_cutoff_premium(self): return datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD) - + @property def days_of_story_hashes(self): if self.is_archive: @@ -133,19 +134,19 @@ def days_of_story_hashes(self): def canonical(self): return { - 'is_premium': self.is_premium, - 'is_archive': self.is_archive, - 'is_pro': self.is_pro, - 'premium_expire': int(self.premium_expire.strftime('%s')) if self.premium_expire else 0, - 'preferences': json.decode(self.preferences), - 'tutorial_finished': self.tutorial_finished, - 'hide_getting_started': self.hide_getting_started, - 'has_setup_feeds': self.has_setup_feeds, - 'has_found_friends': self.has_found_friends, - 'has_trained_intelligence': self.has_trained_intelligence, - 'dashboard_date': self.dashboard_date + "is_premium": self.is_premium, + "is_archive": self.is_archive, + "is_pro": self.is_pro, + "premium_expire": int(self.premium_expire.strftime("%s")) if self.premium_expire else 0, + "preferences": json.decode(self.preferences), + "tutorial_finished": self.tutorial_finished, + "hide_getting_started": self.hide_getting_started, + "has_setup_feeds": self.has_setup_feeds, + "has_found_friends": self.has_found_friends, + "has_trained_intelligence": self.has_trained_intelligence, + "dashboard_date": self.dashboard_date, } - + def save(self, *args, **kwargs): if not self.secret_token: self.secret_token = generate_secret_token(self.user.username, 12) @@ -153,26 +154,29 @@ def save(self, *args, **kwargs): super(Profile, self).save(*args, **kwargs) except DatabaseError as e: print(f" ---> Profile not saved: {e}") - + def delete_user(self, confirm=False, fast=False): if not confirm: print(" ---> You must pass confirm=True to delete this user.") return - + logging.user(self.user, "Deleting user: %s / %s" % (self.user.email, self.user.profile.last_seen_ip)) try: if not fast: self.cancel_premium() except: logging.user(self.user, "~BR~SK~FWError cancelling premium renewal for: %s" % self.user.username) - + from apps.social.models import MSocialProfile, MSharedStory, MSocialSubscription from apps.social.models import MActivity, MInteraction + try: social_profile = MSocialProfile.objects.get(user_id=self.user.pk) - logging.user(self.user, "Unfollowing %s followings and %s followers" % - (social_profile.following_count, - social_profile.follower_count)) + logging.user( + self.user, + "Unfollowing %s followings and %s followers" + % 
(social_profile.following_count, social_profile.follower_count), + ) for follow in social_profile.following_user_ids: social_profile.unfollow_user(follow) for follower in social_profile.follower_user_ids: @@ -182,7 +186,7 @@ def delete_user(self, confirm=False, fast=False): except (MSocialProfile.DoesNotExist, IndexError): logging.user(self.user, " ***> No social profile found. S'ok, moving on.") pass - + shared_stories = MSharedStory.objects.filter(user_id=self.user.pk) logging.user(self.user, "Deleting %s shared stories" % shared_stories.count()) for story in shared_stories: @@ -193,54 +197,56 @@ def delete_user(self, confirm=False, fast=False): except MStory.DoesNotExist: pass story.delete() - + subscriptions = MSocialSubscription.objects.filter(subscription_user_id=self.user.pk) logging.user(self.user, "Deleting %s social subscriptions" % subscriptions.count()) subscriptions.delete() - + interactions = MInteraction.objects.filter(user_id=self.user.pk) logging.user(self.user, "Deleting %s interactions for user." % interactions.count()) interactions.delete() - + interactions = MInteraction.objects.filter(with_user_id=self.user.pk) logging.user(self.user, "Deleting %s interactions with user." % interactions.count()) interactions.delete() - + activities = MActivity.objects.filter(user_id=self.user.pk) logging.user(self.user, "Deleting %s activities for user." % activities.count()) activities.delete() - + activities = MActivity.objects.filter(with_user_id=self.user.pk) logging.user(self.user, "Deleting %s activities with user." % activities.count()) activities.delete() - + starred_stories = MStarredStory.objects.filter(user_id=self.user.pk) logging.user(self.user, "Deleting %s starred stories." % starred_stories.count()) starred_stories.delete() - + paypal_ids = PaypalIds.objects.filter(user=self.user) logging.user(self.user, "Deleting %s PayPal IDs." % paypal_ids.count()) paypal_ids.delete() - + stripe_ids = StripeIds.objects.filter(user=self.user) logging.user(self.user, "Deleting %s Stripe IDs." % stripe_ids.count()) stripe_ids.delete() - + logging.user(self.user, "Deleting user: %s" % self.user) self.user.delete() - + def activate_premium(self, never_expire=False): from apps.profile.tasks import EmailNewPremium - + EmailNewPremium.delay(user_id=self.user.pk) subs = UserSubscription.objects.filter(user=self.user) if subs.count() > 5000: logging.user(self.user, "~FR~SK~FW~SBWARNING! ~FR%s subscriptions~SN!" % (subs.count())) - mail_admins(f"WARNING! {self.user.username} has {subs.count()} subscriptions", - f"{self.user.username} has {subs.count()} subscriptions and just upgraded to premium. They'll need a refund: {self.user.profile.paypal_sub_id} {self.user.profile.stripe_id} {self.user.email}") + mail_admins( + f"WARNING! {self.user.username} has {subs.count()} subscriptions", + f"{self.user.username} has {subs.count()} subscriptions and just upgraded to premium. 
They'll need a refund: {self.user.profile.paypal_sub_id} {self.user.profile.stripe_id} {self.user.email}", + ) return False - + was_premium = self.is_premium self.is_premium = True self.is_archive = False @@ -248,48 +254,57 @@ def activate_premium(self, never_expire=False): self.save() self.user.is_active = True self.user.save() - + # Only auto-enable every feed if a free user is moving to premium if not was_premium: for sub in subs: - if sub.active: continue + if sub.active: + continue sub.active = True try: sub.save() except (IntegrityError, Feed.DoesNotExist): pass - + try: scheduled_feeds = [sub.feed.pk for sub in subs] except Feed.DoesNotExist: scheduled_feeds = [] - logging.user(self.user, "~SN~FMTasking the scheduling immediate premium setup of ~SB%s~SN feeds..." % - len(scheduled_feeds)) + logging.user( + self.user, + "~SN~FMTasking the scheduling immediate premium setup of ~SB%s~SN feeds..." + % len(scheduled_feeds), + ) SchedulePremiumSetup.apply_async(kwargs=dict(feed_ids=scheduled_feeds)) - + UserSubscription.queue_new_feeds(self.user) - + # self.setup_premium_history() # Let's not call this unnecessarily - + if never_expire: self.premium_expire = None self.save() if not was_premium: - logging.user(self.user, "~BY~SK~FW~SBNEW PREMIUM ACCOUNT! WOOHOO!!! ~FR%s subscriptions~SN!" % (subs.count())) - + logging.user( + self.user, + "~BY~SK~FW~SBNEW PREMIUM ACCOUNT! WOOHOO!!! ~FR%s subscriptions~SN!" % (subs.count()), + ) + return True - + def activate_archive(self, never_expire=False): UserSubscription.schedule_fetch_archive_feeds_for_user(self.user.pk) - + subs = UserSubscription.objects.filter(user=self.user) if subs.count() > 2000: logging.user(self.user, "~FR~SK~FW~SBWARNING! ~FR%s subscriptions~SN!" % (subs.count())) - mail_admins(f"WARNING! {self.user.username} has {subs.count()} subscriptions", - f"{self.user.username} has {subs.count()} subscriptions and just upgraded to archive. They'll need a refund: {self.user.profile.paypal_sub_id} {self.user.profile.stripe_id} {self.user.email}") + mail_admins( + f"WARNING! {self.user.username} has {subs.count()} subscriptions", + f"{self.user.username} has {subs.count()} subscriptions and just upgraded to archive. They'll need a refund: {self.user.profile.paypal_sub_id} {self.user.profile.stripe_id} {self.user.email}", + ) return False - + was_premium = self.is_premium was_archive = self.is_archive was_pro = self.is_pro @@ -298,52 +313,62 @@ def activate_archive(self, never_expire=False): self.save() self.user.is_active = True self.user.save() - + # Only auto-enable every feed if a free user is moving to premium if not was_premium: for sub in subs: - if sub.active: continue + if sub.active: + continue sub.active = True try: sub.save() except (IntegrityError, Feed.DoesNotExist): pass - + # Count subscribers to turn on archive_subscribers counts, then show that count to users # on the paypal_archive_return page. try: scheduled_feeds = [sub.feed.pk for sub in subs] except Feed.DoesNotExist: scheduled_feeds = [] - logging.user(self.user, "~SN~FMTasking the scheduling immediate premium setup of ~SB%s~SN feeds..." % - len(scheduled_feeds)) + logging.user( + self.user, + "~SN~FMTasking the scheduling immediate premium setup of ~SB%s~SN feeds..." 
+ % len(scheduled_feeds), + ) SchedulePremiumSetup.apply_async(kwargs=dict(feed_ids=scheduled_feeds)) UserSubscription.queue_new_feeds(self.user) - + self.setup_premium_history() - + if never_expire: self.premium_expire = None self.save() if not was_archive: - logging.user(self.user, "~BY~SK~FW~SBNEW PREMIUM ~BBARCHIVE~BY ACCOUNT! WOOHOO!!! ~FR%s subscriptions~SN!" % (subs.count())) - + logging.user( + self.user, + "~BY~SK~FW~SBNEW PREMIUM ~BBARCHIVE~BY ACCOUNT! WOOHOO!!! ~FR%s subscriptions~SN!" + % (subs.count()), + ) + return True - + def activate_pro(self, never_expire=False): from apps.profile.tasks import EmailNewPremiumPro - + EmailNewPremiumPro.delay(user_id=self.user.pk) - + subs = UserSubscription.objects.filter(user=self.user) if subs.count() > 1000: logging.user(self.user, "~FR~SK~FW~SBWARNING! ~FR%s subscriptions~SN!" % (subs.count())) - mail_admins(f"WARNING! {self.user.username} has {subs.count()} subscriptions", - f"{self.user.username} has {subs.count()} subscriptions and just upgraded to pro. They'll need a refund: {self.user.profile.paypal_sub_id} {self.user.profile.stripe_id} {self.user.email}") + mail_admins( + f"WARNING! {self.user.username} has {subs.count()} subscriptions", + f"{self.user.username} has {subs.count()} subscriptions and just upgraded to pro. They'll need a refund: {self.user.profile.paypal_sub_id} {self.user.profile.stripe_id} {self.user.email}", + ) return False - + was_premium = self.is_premium was_archive = self.is_archive was_pro = self.is_pro @@ -353,44 +378,52 @@ def activate_pro(self, never_expire=False): self.save() self.user.is_active = True self.user.save() - + # Only auto-enable every feed if a free user is moving to premium if not was_premium: for sub in subs: - if sub.active: continue + if sub.active: + continue sub.active = True try: sub.save() except (IntegrityError, Feed.DoesNotExist): pass - + try: scheduled_feeds = [sub.feed.pk for sub in subs] except Feed.DoesNotExist: scheduled_feeds = [] - logging.user(self.user, "~SN~FMTasking the scheduling immediate premium setup of ~SB%s~SN feeds..." % - len(scheduled_feeds)) + logging.user( + self.user, + "~SN~FMTasking the scheduling immediate premium setup of ~SB%s~SN feeds..." + % len(scheduled_feeds), + ) SchedulePremiumSetup.apply_async(kwargs=dict(feed_ids=scheduled_feeds)) - + UserSubscription.queue_new_feeds(self.user) - + self.setup_premium_history() - + if never_expire: self.premium_expire = None self.save() if not was_pro: - logging.user(self.user, "~BY~SK~FW~SBNEW PREMIUM ~BGPRO~BY ACCOUNT! WOOHOO!!! ~FR%s subscriptions~SN!" % (subs.count())) - + logging.user( + self.user, + "~BY~SK~FW~SBNEW PREMIUM ~BGPRO~BY ACCOUNT! WOOHOO!!! ~FR%s subscriptions~SN!" + % (subs.count()), + ) + return True - + def deactivate_premium(self): self.is_premium = False self.is_pro = False self.is_archive = False self.save() - + subs = UserSubscription.objects.filter(user=self.user) for sub in subs: sub.active = False @@ -400,57 +433,61 @@ def deactivate_premium(self): # sub.feed.setup_feed_for_premium_subscribers() except (IntegrityError, Feed.DoesNotExist): pass - - logging.user(self.user, "~BY~FW~SBBOO! Deactivating premium account: ~FR%s subscriptions~SN!" % (subs.count())) - + + logging.user( + self.user, "~BY~FW~SBBOO! Deactivating premium account: ~FR%s subscriptions~SN!" 
% (subs.count()) + ) + def activate_free(self): if self.user.is_active: return - + self.user.is_active = True self.user.save() self.send_new_user_queue_email() - + def paypal_change_billing_details_url(self): return "https://paypal.com" - + def switch_stripe_subscription(self, plan): stripe_customer = self.stripe_customer() if not stripe_customer: return - + stripe_subscriptions = stripe.Subscription.list(customer=stripe_customer.id).data existing_subscription = None for subscription in stripe_subscriptions: if subscription.plan.active: existing_subscription = subscription break - if not existing_subscription: + if not existing_subscription: return try: stripe.Subscription.modify( existing_subscription.id, cancel_at_period_end=False, - proration_behavior='always_invoice', - items=[{ - 'id': existing_subscription['items']['data'][0].id, - 'price': Profile.plan_to_stripe_price(plan) - }] + proration_behavior="always_invoice", + items=[ + { + "id": existing_subscription["items"]["data"][0].id, + "price": Profile.plan_to_stripe_price(plan), + } + ], ) except stripe.error.CardError as e: logging.user(self.user, f"~FRStripe switch subscription failed: ~SB{e}") return - + self.setup_premium_history() - + return True def cancel_and_prorate_existing_paypal_subscriptions(self, data): paypal_api = self.paypal_api() if not paypal_api: return - + canceled_paypal_sub_id = self.cancel_premium_paypal(cancel_older_subscriptions_only=True) if not canceled_paypal_sub_id: logging.user(self.user, f"~FRCould not cancel and prorate older paypal premium: {data}") @@ -463,36 +500,43 @@ def switch_paypal_subscription_approval_url(self, plan): paypal_api = self.paypal_api() if not paypal_api: return - paypal_return = reverse('paypal-return') + paypal_return = reverse("paypal-return") if plan == "archive": - paypal_return = reverse('paypal-archive-return') + paypal_return = reverse("paypal-archive-return") try: application_context = { - 'shipping_preference': 'NO_SHIPPING', - 'user_action': 'SUBSCRIBE_NOW', + "shipping_preference": "NO_SHIPPING", + "user_action": "SUBSCRIBE_NOW", } if settings.DEBUG: - application_context['return_url'] = f"https://a6d3-161-77-224-226.ngrok.io{paypal_return}" + application_context["return_url"] = f"https://a6d3-161-77-224-226.ngrok.io{paypal_return}" else: - application_context['return_url'] = f"https://{Site.objects.get_current().domain}{paypal_return}" - paypal_subscription = paypal_api.post(f'/v1/billing/subscriptions', { - 'plan_id': Profile.plan_to_paypal_plan_id(plan), - 'custom_id': self.user.pk, - 'application_context': application_context, - }) + application_context[ + "return_url" + ] = f"https://{Site.objects.get_current().domain}{paypal_return}" + paypal_subscription = paypal_api.post( + f"/v1/billing/subscriptions", + { + "plan_id": Profile.plan_to_paypal_plan_id(plan), + "custom_id": self.user.pk, + "application_context": application_context, + }, + ) except paypalrestsdk.ResourceNotFound as e: - logging.user(self.user, f"~FRCouldn't create paypal subscription: {self.paypal_sub_id} {plan}: {e}") + logging.user( + self.user, f"~FRCouldn't create paypal subscription: {self.paypal_sub_id} {plan}: {e}" + ) paypal_subscription = None if not paypal_subscription: return logging.user(self.user, paypal_subscription) - - for link in paypal_subscription.get('links', []): - if link['rel'] == 'approve': - return link['href'] - + + for link in paypal_subscription.get("links", []): + if link["rel"] == "approve": + return link["href"] + logging.user(self.user, f"~FRFailed to switch 
paypal subscription: ~FC{paypal_subscription}") def store_paypal_sub_id(self, paypal_sub_id, skip_save_primary=False): @@ -503,12 +547,12 @@ def store_paypal_sub_id(self, paypal_sub_id, skip_save_primary=False): if not skip_save_primary or not self.paypal_sub_id: self.paypal_sub_id = paypal_sub_id self.save() - + seen_paypal_ids = set(p.paypal_sub_id for p in self.user.paypal_ids.all()) if paypal_sub_id in seen_paypal_ids: logging.user(self.user, f"~FBPaypal sub seen before, ignoring: {paypal_sub_id}") return - + self.user.paypal_ids.create(paypal_sub_id=paypal_sub_id) logging.user(self.user, f"~FBPaypal sub ~SBadded~SN: ~SB{paypal_sub_id}") @@ -519,7 +563,7 @@ def setup_premium_history(self, alt_email=None, set_premium_expire=True, force_e active_plan = None premium_renewal = False active_provider = None - + # Find modern Paypal payments self.retrieve_paypal_ids() if self.paypal_sub_id: @@ -534,76 +578,92 @@ def setup_premium_history(self, alt_email=None, set_premium_expire=True, force_e seen_payments.add(payment.payment_date.date()) total_paypal_payments += 1 if deleted_paypal_payments > 0: - logging.user(self.user, f"~BY~SN~FRDeleting~FW duplicate paypal history: ~SB{deleted_paypal_payments} payments") + logging.user( + self.user, + f"~BY~SN~FRDeleting~FW duplicate paypal history: ~SB{deleted_paypal_payments} payments", + ) paypal_api = self.paypal_api() for paypal_id_model in self.user.paypal_ids.all(): paypal_id = paypal_id_model.paypal_sub_id try: - paypal_subscription = paypal_api.get(f'/v1/billing/subscriptions/{paypal_id}?fields=plan') + paypal_subscription = paypal_api.get(f"/v1/billing/subscriptions/{paypal_id}?fields=plan") except paypalrestsdk.ResourceNotFound: logging.user(self.user, f"~FRCouldn't find paypal payments: {paypal_id}") paypal_subscription = None if paypal_subscription: - if paypal_subscription['status'] in ["APPROVAL_PENDING", "APPROVED", "ACTIVE"]: - active_plan = paypal_subscription.get('plan_id', None) + if paypal_subscription["status"] in ["APPROVAL_PENDING", "APPROVED", "ACTIVE"]: + active_plan = paypal_subscription.get("plan_id", None) if not active_plan: - active_plan = paypal_subscription['plan']['name'] + active_plan = paypal_subscription["plan"]["name"] active_provider = "paypal" premium_renewal = True start_date = datetime.datetime(2009, 1, 1).strftime("%Y-%m-%dT%H:%M:%S.000Z") end_date = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.000Z") try: - transactions = paypal_api.get(f"/v1/billing/subscriptions/{paypal_id}/transactions?start_time={start_date}&end_time={end_date}") + transactions = paypal_api.get( + f"/v1/billing/subscriptions/{paypal_id}/transactions?start_time={start_date}&end_time={end_date}" + ) except paypalrestsdk.exceptions.ResourceNotFound: transactions = None - if not transactions or 'transactions' not in transactions: + if not transactions or "transactions" not in transactions: logging.user(self.user, f"~FRCouldn't find paypal transactions: ~SB{paypal_id}") continue - for transaction in transactions['transactions']: - created = dateutil.parser.parse(transaction['time']).date() - if transaction['status'] not in ['COMPLETED', 'PARTIALLY_REFUNDED', 'REFUNDED']: continue - if created in seen_payments: continue + for transaction in transactions["transactions"]: + created = dateutil.parser.parse(transaction["time"]).date() + if transaction["status"] not in ["COMPLETED", "PARTIALLY_REFUNDED", "REFUNDED"]: + continue + if created in seen_payments: + continue seen_payments.add(created) total_paypal_payments += 1 refunded = None - if 
transaction['status'] in ['PARTIALLY_REFUNDED', 'REFUNDED']: + if transaction["status"] in ["PARTIALLY_REFUNDED", "REFUNDED"]: refunded = True - PaymentHistory.objects.get_or_create(user=self.user, - payment_date=created, - payment_amount=int(float(transaction['amount_with_breakdown']['gross_amount']['value'])), - payment_provider='paypal', - refunded=refunded) - - ipns = PayPalIPN.objects.filter(Q(custom=self.user.username) | - Q(payer_email=self.user.email) | - Q(custom=self.user.pk)).order_by('-payment_date') + PaymentHistory.objects.get_or_create( + user=self.user, + payment_date=created, + payment_amount=int( + float(transaction["amount_with_breakdown"]["gross_amount"]["value"]) + ), + payment_provider="paypal", + refunded=refunded, + ) + + ipns = PayPalIPN.objects.filter( + Q(custom=self.user.username) | Q(payer_email=self.user.email) | Q(custom=self.user.pk) + ).order_by("-payment_date") for transaction in ipns: if transaction.txn_type != "subscr_payment": continue created = transaction.payment_date.date() - if created in seen_payments: + if created in seen_payments: continue seen_payments.add(created) total_paypal_payments += 1 - PaymentHistory.objects.get_or_create(user=self.user, - payment_date=created, - payment_amount=int(transaction.payment_gross), - payment_provider='paypal') + PaymentHistory.objects.get_or_create( + user=self.user, + payment_date=created, + payment_amount=int(transaction.payment_gross), + payment_provider="paypal", + ) else: logging.user(self.user, "~FBNo Paypal payments") - + # Record Stripe payments - existing_stripe_history = PaymentHistory.objects.filter(user=self.user, - payment_provider="stripe") + existing_stripe_history = PaymentHistory.objects.filter(user=self.user, payment_provider="stripe") if existing_stripe_history.count(): - logging.user(self.user, "~BY~SN~FRDeleting~FW existing stripe history: ~SB%s payments" % existing_stripe_history.count()) + logging.user( + self.user, + "~BY~SN~FRDeleting~FW existing stripe history: ~SB%s payments" + % existing_stripe_history.count(), + ) existing_stripe_history.delete() - + if self.stripe_id: self.retrieve_stripe_ids() - + stripe.api_key = settings.STRIPE_SECRET seen_payments = set() for stripe_id_model in self.user.stripe_ids.all(): @@ -611,7 +671,7 @@ def setup_premium_history(self, alt_email=None, set_premium_expire=True, force_e stripe_customer = stripe.Customer.retrieve(stripe_id) stripe_payments = stripe.Charge.list(customer=stripe_customer.id).data stripe_subscriptions = stripe.Subscription.list(customer=stripe_customer.id).data - + for subscription in stripe_subscriptions: if subscription.plan.active: active_plan = subscription.plan.id @@ -619,21 +679,25 @@ def setup_premium_history(self, alt_email=None, set_premium_expire=True, force_e if not subscription.cancel_at: premium_renewal = True break - + for payment in stripe_payments: created = datetime.datetime.fromtimestamp(payment.created) - if payment.status == 'failed': continue - if created in seen_payments: continue + if payment.status == "failed": + continue + if created in seen_payments: + continue seen_payments.add(created) total_stripe_payments += 1 refunded = None if payment.refunded: refunded = True - PaymentHistory.objects.get_or_create(user=self.user, - payment_date=created, - payment_amount=payment.amount / 100.0, - payment_provider='stripe', - refunded=refunded) + PaymentHistory.objects.get_or_create( + user=self.user, + payment_date=created, + payment_amount=payment.amount / 100.0, + payment_provider="stripe", + refunded=refunded, + 
) else: logging.user(self.user, "~FBNo Stripe payments") @@ -655,14 +719,17 @@ def setup_premium_history(self, alt_email=None, set_premium_expire=True, force_e recent_payments_count += 1 if not oldest_recent_payment_date or payment.payment_date < oldest_recent_payment_date: oldest_recent_payment_date = payment.payment_date - + if oldest_recent_payment_date: - new_premium_expire = (oldest_recent_payment_date + - datetime.timedelta(days=365*recent_payments_count)) + new_premium_expire = oldest_recent_payment_date + datetime.timedelta( + days=365 * recent_payments_count + ) # Only move premium expire forward, never earlier. Also set expiration if not premium. - if (force_expiration or - (set_premium_expire and not self.premium_expire and not free_lifetime_premium) or - (self.premium_expire and new_premium_expire > self.premium_expire)): + if ( + force_expiration + or (set_premium_expire and not self.premium_expire and not free_lifetime_premium) + or (self.premium_expire and new_premium_expire > self.premium_expire) + ): self.premium_expire = new_premium_expire self.save() @@ -670,28 +737,43 @@ def setup_premium_history(self, alt_email=None, set_premium_expire=True, force_e active_sub_id = self.stripe_id if active_provider == "paypal": active_sub_id = self.paypal_sub_id - logging.user(self.user, "~FCTurning ~SB~%s~SN~FC premium renewal (%s: %s)" % ("FRoff" if not premium_renewal else "FBon", active_provider, active_sub_id)) + logging.user( + self.user, + "~FCTurning ~SB~%s~SN~FC premium renewal (%s: %s)" + % ("FRoff" if not premium_renewal else "FBon", active_provider, active_sub_id), + ) self.premium_renewal = premium_renewal self.active_provider = active_provider self.save() - - logging.user(self.user, "~BY~SN~FWFound ~SB~FB%s paypal~FW~SN and ~SB~FC%s stripe~FW~SN payments (~SB%s payments expire: ~SN~FB%s~FW)" % ( - total_paypal_payments, total_stripe_payments, len(payment_history), self.premium_expire)) - if (set_premium_expire and not self.is_premium and - self.premium_expire > datetime.datetime.now()): + logging.user( + self.user, + "~BY~SN~FWFound ~SB~FB%s paypal~FW~SN and ~SB~FC%s stripe~FW~SN payments (~SB%s payments expire: ~SN~FB%s~FW)" + % (total_paypal_payments, total_stripe_payments, len(payment_history), self.premium_expire), + ) + + if set_premium_expire and not self.is_premium and self.premium_expire > datetime.datetime.now(): self.activate_premium() - - logging.user(self.user, "~FCActive plan: %s, stripe/paypal: %s/%s, is_archive? %s" % (active_plan, Profile.plan_to_stripe_price('archive'), Profile.plan_to_paypal_plan_id('archive'), self.is_archive)) - if (active_plan == Profile.plan_to_stripe_price('pro') and not self.is_pro): + + logging.user( + self.user, + "~FCActive plan: %s, stripe/paypal: %s/%s, is_archive? 
%s" + % ( + active_plan, + Profile.plan_to_stripe_price("archive"), + Profile.plan_to_paypal_plan_id("archive"), + self.is_archive, + ), + ) + if active_plan == Profile.plan_to_stripe_price("pro") and not self.is_pro: self.activate_pro() - elif (active_plan == Profile.plan_to_stripe_price('archive') and not self.is_archive): + elif active_plan == Profile.plan_to_stripe_price("archive") and not self.is_archive: self.activate_archive() - elif (active_plan == Profile.plan_to_paypal_plan_id('pro') and not self.is_pro): + elif active_plan == Profile.plan_to_paypal_plan_id("pro") and not self.is_pro: self.activate_pro() - elif (active_plan == Profile.plan_to_paypal_plan_id('archive') and not self.is_archive): + elif active_plan == Profile.plan_to_paypal_plan_id("archive") and not self.is_archive: self.activate_archive() - + def preference_value(self, key, default=None): preferences = json.decode(self.preferences) return preferences.get(key, default) @@ -700,8 +782,7 @@ def preference_value(self, key, default=None): def resync_stripe_and_paypal_history(cls, start_days=365, end_days=0, skip=0): start_date = datetime.datetime.now() - datetime.timedelta(days=start_days) end_date = datetime.datetime.now() - datetime.timedelta(days=end_days) - payments = PaymentHistory.objects.filter(payment_date__gte=start_date, - payment_date__lte=end_date) + payments = PaymentHistory.objects.filter(payment_date__gte=start_date, payment_date__lte=end_date) last_seen_date = None for p, payment in enumerate(payments): if p < skip: @@ -711,30 +792,30 @@ def resync_stripe_and_paypal_history(cls, start_days=365, end_days=0, skip=0): if payment.payment_date.date() != last_seen_date: last_seen_date = payment.payment_date.date() print(f" ---> Payment date: {last_seen_date} (#{p})") - + payment.user.profile.setup_premium_history() @classmethod def reimport_stripe_history(cls, limit=10, days=7, starting_after=None): stripe.api_key = settings.STRIPE_SECRET - week = (datetime.datetime.now() - datetime.timedelta(days=days)).strftime('%s') + week = (datetime.datetime.now() - datetime.timedelta(days=days)).strftime("%s") failed = [] i = 0 - + while True: logging.debug(" ---> At %s / %s" % (i, starting_after)) i += 1 try: - data = stripe.Charge.list(created={'gt': week}, count=limit, starting_after=starting_after) + data = stripe.Charge.list(created={"gt": week}, count=limit, starting_after=starting_after) except stripe.error.APIConnectionError: time.sleep(10) continue - charges = data['data'] + charges = data["data"] if not len(charges): logging.debug("At %s (%s), finished" % (i, starting_after)) break starting_after = charges[-1]["id"] - customers = [c['customer'] for c in charges if 'customer' in c] + customers = [c["customer"] for c in charges if "customer" in c] for customer in customers: if not customer: print(" ***> No customer!") @@ -758,8 +839,8 @@ def reimport_stripe_history(cls, limit=10, days=7, starting_after=None): time.sleep(2) continue - return ','.join(failed) - + return ",".join(failed) + def refund_premium(self, partial=False, provider=None): refunded = False if provider == "paypal": @@ -770,24 +851,27 @@ def refund_premium(self, partial=False, provider=None): # self.cancel_premium_stripe() else: # Find last payment, refund that - payment_history = PaymentHistory.objects.filter(user=self.user, - payment_provider__in=['paypal', 'stripe']) + payment_history = PaymentHistory.objects.filter( + user=self.user, payment_provider__in=["paypal", "stripe"] + ) if payment_history.count(): provider = 
payment_history[0].payment_provider if provider == "stripe": refunded = self.refund_latest_stripe_payment(partial=partial) # self.cancel_premium_stripe() elif provider == "paypal": - refunded = self.refund_paypal_payment_from_subscription(self.paypal_sub_id, prorate=partial) + refunded = self.refund_paypal_payment_from_subscription( + self.paypal_sub_id, prorate=partial + ) self.cancel_premium_paypal() return refunded - + def refund_latest_stripe_payment(self, partial=False): refunded = False if not self.stripe_id: return - + stripe.api_key = settings.STRIPE_SECRET stripe_customer = stripe.Customer.retrieve(self.stripe_id) stripe_payments = stripe.Charge.list(customer=stripe_customer.id).data @@ -797,116 +881,128 @@ def refund_latest_stripe_payment(self, partial=False): else: stripe_payments[0].refund() self.cancel_premium_stripe() - refunded = stripe_payments[0].amount/100 - + refunded = stripe_payments[0].amount / 100 + logging.user(self.user, "~FRRefunding stripe payment: $%s" % refunded) return refunded - + def refund_paypal_payment_from_subscription(self, paypal_sub_id, prorate=False): - if not paypal_sub_id: + if not paypal_sub_id: return - + paypal_api = self.paypal_api() refunded = False # Find transaction from subscription now = datetime.datetime.now() + datetime.timedelta(days=1) # 200 days captures Paypal's 180 day limit on refunds - start_date = (now-datetime.timedelta(days=200)).strftime("%Y-%m-%dT%H:%M:%SZ") + start_date = (now - datetime.timedelta(days=200)).strftime("%Y-%m-%dT%H:%M:%SZ") end_date = now.strftime("%Y-%m-%dT%H:%M:%SZ") try: - transactions = paypal_api.get(f"/v1/billing/subscriptions/{paypal_sub_id}/transactions?start_time={start_date}&end_time={end_date}") + transactions = paypal_api.get( + f"/v1/billing/subscriptions/{paypal_sub_id}/transactions?start_time={start_date}&end_time={end_date}" + ) except paypalrestsdk.ResourceNotFound: transactions = {} - if 'transactions' not in transactions or not len(transactions['transactions']): - logging.user(self.user, f"~FRCouldn't find paypal transactions for refund: {paypal_sub_id} {transactions}") + if "transactions" not in transactions or not len(transactions["transactions"]): + logging.user( + self.user, f"~FRCouldn't find paypal transactions for refund: {paypal_sub_id} {transactions}" + ) return - + # Refund the latest transaction - transaction = transactions['transactions'][0] - today = datetime.datetime.now().strftime('%B %d, %Y') + transaction = transactions["transactions"][0] + today = datetime.datetime.now().strftime("%B %d, %Y") url = f"/v2/payments/captures/{transaction['id']}/refund" - refund_amount = float(transaction['amount_with_breakdown']['gross_amount']['value']) + refund_amount = float(transaction["amount_with_breakdown"]["gross_amount"]["value"]) if prorate: - transaction_date = dateutil.parser.parse(transaction['time']) + transaction_date = dateutil.parser.parse(transaction["time"]) days_since = (datetime.datetime.now() - transaction_date.replace(tzinfo=None)).days if days_since < 365: - days_left = (365 - days_since) - pct_left = days_left/365 + days_left = 365 - days_since + pct_left = days_left / 365 refund_amount = pct_left * refund_amount else: logging.user(self.user, f"~FRCouldn't prorate paypal payment, too old: ~SB{transaction}") try: - response = paypal_api.post(url, { - 'reason': f"Refunded on {today}", - 'amount': { - 'currency_code': 'USD', - 'value': f"{refund_amount:.2f}", - } - }) + response = paypal_api.post( + url, + { + "reason": f"Refunded on {today}", + "amount": { + 
"currency_code": "USD", + "value": f"{refund_amount:.2f}", + }, + }, + ) except paypalrestsdk.exceptions.ResourceInvalid as e: response = e.response.json() - if len(response.get('details', [])): - response = response['details'][0]['description'] + if len(response.get("details", [])): + response = response["details"][0]["description"] if settings.DEBUG: logging.user(self.user, f"Paypal refund response: {response}") - if 'status' in response and response['status'] == "COMPLETED": - refunded = int(float(transaction['amount_with_breakdown']['gross_amount']['value'])) + if "status" in response and response["status"] == "COMPLETED": + refunded = int(float(transaction["amount_with_breakdown"]["gross_amount"]["value"])) logging.user(self.user, "~FRRefunding paypal payment: $%s/%s" % (refund_amount, refunded)) else: logging.user(self.user, "~FRCouldn't refund paypal payment: %s" % response) refunded = response - + return refunded - + def cancel_premium(self): paypal_cancel = self.cancel_premium_paypal() stripe_cancel = self.cancel_premium_stripe() - self.setup_premium_history() # Sure, webhooks will force new history, but they take forever + self.setup_premium_history() # Sure, webhooks will force new history, but they take forever return stripe_cancel or paypal_cancel - + def cancel_premium_paypal(self, cancel_older_subscriptions_only=False): self.retrieve_paypal_ids() if not self.paypal_sub_id: logging.user(self.user, "~FRUser doesn't have a Paypal subscription, how did we get here?") return if not self.premium_renewal and not cancel_older_subscriptions_only: - logging.user(self.user, "~FRUser ~SBalready~SN canceled Paypal subscription: %s" % self.paypal_sub_id) + logging.user( + self.user, "~FRUser ~SBalready~SN canceled Paypal subscription: %s" % self.paypal_sub_id + ) return paypal_api = self.paypal_api() - today = datetime.datetime.now().strftime('%B %d, %Y') + today = datetime.datetime.now().strftime("%B %d, %Y") for paypal_id_model in self.user.paypal_ids.all(): paypal_id = paypal_id_model.paypal_sub_id if cancel_older_subscriptions_only and paypal_id == self.paypal_sub_id: - logging.user(self.user, "~FBNot canceling active Paypal subscription: %s" % self.paypal_sub_id) + logging.user( + self.user, "~FBNot canceling active Paypal subscription: %s" % self.paypal_sub_id + ) continue try: - paypal_subscription = paypal_api.get(f'/v1/billing/subscriptions/{paypal_id}') + paypal_subscription = paypal_api.get(f"/v1/billing/subscriptions/{paypal_id}") except paypalrestsdk.ResourceNotFound: logging.user(self.user, f"~FRCouldn't find paypal payments: {paypal_id}") continue - if paypal_subscription['status'] not in ['ACTIVE', 'APPROVED', 'APPROVAL_PENDING']: + if paypal_subscription["status"] not in ["ACTIVE", "APPROVED", "APPROVAL_PENDING"]: logging.user(self.user, "~FRUser ~SBalready~SN canceled Paypal subscription: %s" % paypal_id) continue url = f"/v1/billing/subscriptions/{paypal_id}/suspend" try: - response = paypal_api.post(url, { - 'reason': f"Cancelled on {today}" - }) + response = paypal_api.post(url, {"reason": f"Cancelled on {today}"}) except paypalrestsdk.ResourceNotFound as e: - logging.user(self.user, f"~FRCouldn't find paypal response during ~FB~SB{paypal_id}~SN~FR profile suspend: ~SB~FB{e}") - + logging.user( + self.user, + f"~FRCouldn't find paypal response during ~FB~SB{paypal_id}~SN~FR profile suspend: ~SB~FB{e}", + ) + logging.user(self.user, "~FRCanceling Paypal subscription: %s" % paypal_id) return paypal_id return True - + def cancel_premium_stripe(self): if not 
self.stripe_id: return - + stripe.api_key = settings.STRIPE_SECRET for stripe_id_model in self.user.stripe_ids.all(): stripe_id = stripe_id_model.stripe_id @@ -914,56 +1010,57 @@ def cancel_premium_stripe(self): try: subscriptions = stripe.Subscription.list(customer=stripe_customer) for subscription in subscriptions.data: - stripe.Subscription.modify(subscription['id'], cancel_at_period_end=True) - logging.user(self.user, "~FRCanceling Stripe subscription: %s" % subscription['id']) + stripe.Subscription.modify(subscription["id"], cancel_at_period_end=True) + logging.user(self.user, "~FRCanceling Stripe subscription: %s" % subscription["id"]) except stripe.error.InvalidRequestError: logging.user(self.user, "~FRFailed to cancel Stripe subscription: %s" % stripe_id) continue - + return True - + def retrieve_stripe_ids(self): if not self.stripe_id: return - + stripe.api_key = settings.STRIPE_SECRET stripe_customer = stripe.Customer.retrieve(self.stripe_id) stripe_email = stripe_customer.email - + stripe_ids = set() for email in set([stripe_email, self.user.email]): customers = stripe.Customer.list(email=email) for customer in customers: stripe_ids.add(customer.stripe_id) - + self.user.stripe_ids.all().delete() for stripe_id in stripe_ids: self.user.stripe_ids.create(stripe_id=stripe_id) - + def retrieve_paypal_ids(self, force=False): if self.paypal_sub_id and not force: return - - ipns = PayPalIPN.objects.filter(Q(custom=self.user.username) | - Q(payer_email=self.user.email) | - Q(custom=self.user.pk)).order_by('-payment_date') + + ipns = PayPalIPN.objects.filter( + Q(custom=self.user.username) | Q(payer_email=self.user.email) | Q(custom=self.user.pk) + ).order_by("-payment_date") if not len(ipns): return - + self.paypal_sub_id = ipns[0].subscr_id self.save() paypal_ids = set() for ipn in ipns: - if not ipn.subscr_id: continue + if not ipn.subscr_id: + continue paypal_ids.add(ipn.subscr_id) - + seen_paypal_ids = set(p.paypal_sub_id for p in self.user.paypal_ids.all()) for paypal_id in paypal_ids: if paypal_id in seen_paypal_ids: continue self.user.paypal_ids.create(paypal_sub_id=paypal_id) - + @property def latest_paypal_email(self): ipn = PayPalIPN.objects.filter(custom=self.user.username) @@ -971,9 +1068,9 @@ def latest_paypal_email(self): ipn = PayPalIPN.objects.filter(payer_email=self.user.email) if not len(ipn): return - + return ipn[0].payer_email - + def update_email(self, new_email): from apps.social.models import MSocialProfile @@ -982,14 +1079,14 @@ def update_email(self, new_email): self.user.email = new_email self.user.save() - + sp = MSocialProfile.get_user(self.user.pk) sp.email = new_email sp.save() if self.stripe_id: stripe_customer = self.stripe_customer() - stripe_customer.update({'email': new_email}) + stripe_customer.update({"email": new_email}) stripe_customer.save() def stripe_customer(self): @@ -997,71 +1094,85 @@ def stripe_customer(self): stripe.api_key = settings.STRIPE_SECRET stripe_customer = stripe.Customer.retrieve(self.stripe_id) return stripe_customer - + def paypal_api(self): if self.paypal_sub_id: - api = paypalrestsdk.Api({ - "mode": "sandbox" if settings.DEBUG else "live", - "client_id": settings.PAYPAL_API_CLIENTID, - "client_secret": settings.PAYPAL_API_SECRET - }) + api = paypalrestsdk.Api( + { + "mode": "sandbox" if settings.DEBUG else "live", + "client_id": settings.PAYPAL_API_CLIENTID, + "client_secret": settings.PAYPAL_API_SECRET, + } + ) return api - + def activate_ios_premium(self, transaction_identifier=None, amount=36): - payments = 
PaymentHistory.objects.filter(user=self.user, - payment_identifier=transaction_identifier, - payment_date__gte=datetime.datetime.now()-datetime.timedelta(days=3)) + payments = PaymentHistory.objects.filter( + user=self.user, + payment_identifier=transaction_identifier, + payment_date__gte=datetime.datetime.now() - datetime.timedelta(days=3), + ) if len(payments): # Already paid - logging.user(self.user, "~FG~BBAlready paid iOS premium subscription: $%s~FW" % transaction_identifier) + logging.user( + self.user, "~FG~BBAlready paid iOS premium subscription: $%s~FW" % transaction_identifier + ) return False - PaymentHistory.objects.create(user=self.user, - payment_date=datetime.datetime.now(), - payment_amount=amount, - payment_provider='ios-subscription', - payment_identifier=transaction_identifier) - + PaymentHistory.objects.create( + user=self.user, + payment_date=datetime.datetime.now(), + payment_amount=amount, + payment_provider="ios-subscription", + payment_identifier=transaction_identifier, + ) + self.setup_premium_history() - + if not self.is_premium: self.activate_premium() - + logging.user(self.user, "~FG~BBNew iOS premium subscription: $%s~FW" % amount) return True - + def activate_android_premium(self, order_id=None, amount=36): - payments = PaymentHistory.objects.filter(user=self.user, - payment_identifier=order_id, - payment_date__gte=datetime.datetime.now()-datetime.timedelta(days=3)) + payments = PaymentHistory.objects.filter( + user=self.user, + payment_identifier=order_id, + payment_date__gte=datetime.datetime.now() - datetime.timedelta(days=3), + ) if len(payments): # Already paid logging.user(self.user, "~FG~BBAlready paid Android premium subscription: $%s~FW" % amount) return False - PaymentHistory.objects.create(user=self.user, - payment_date=datetime.datetime.now(), - payment_amount=amount, - payment_provider='android-subscription', - payment_identifier=order_id) - + PaymentHistory.objects.create( + user=self.user, + payment_date=datetime.datetime.now(), + payment_amount=amount, + payment_provider="android-subscription", + payment_identifier=order_id, + ) + self.setup_premium_history() - + if order_id == "nb.premium.archive.99": self.activate_archive() elif not self.is_premium: self.activate_premium() - + logging.user(self.user, "~FG~BBNew Android premium subscription: $%s~FW" % amount) return True - + @classmethod def clear_dead_spammers(self, days=30, confirm=False): - users = User.objects.filter(date_joined__gte=datetime.datetime.now()-datetime.timedelta(days=days)).order_by('-date_joined') + users = User.objects.filter( + date_joined__gte=datetime.datetime.now() - datetime.timedelta(days=days) + ).order_by("-date_joined") usernames = set() - numerics = re.compile(r'[0-9]+') + numerics = re.compile(r"[0-9]+") for user in users: - opens = UserSubscription.objects.filter(user=user).aggregate(sum=Sum('feed_opens'))['sum'] + opens = UserSubscription.objects.filter(user=user).aggregate(sum=Sum("feed_opens"))["sum"] reads = RUserStory.read_story_count(user.pk) has_numbers = numerics.search(user.username) @@ -1069,7 +1180,9 @@ def clear_dead_spammers(self, days=30, confirm=False): has_profile = user.profile.last_seen_ip except Profile.DoesNotExist: usernames.add(user.username) - print(" ---> Missing profile: %-20s %-30s %-6s %-6s" % (user.username, user.email, opens, reads)) + print( + " ---> Missing profile: %-20s %-30s %-6s %-6s" % (user.username, user.email, opens, reads) + ) continue if opens is None and not reads and has_numbers: @@ -1078,9 +1191,10 @@ def 
clear_dead_spammers(self, days=30, confirm=False): elif not has_profile: usernames.add(user.username) print(" ---> No IP: %-20s %-30s %-6s %-6s" % (user.username, user.email, opens, reads)) - - if not confirm: return usernames - + + if not confirm: + return usernames + for username in usernames: try: u = User.objects.get(username=username) @@ -1090,27 +1204,33 @@ def clear_dead_spammers(self, days=30, confirm=False): RNewUserQueue.user_count() RNewUserQueue.activate_all() - + @classmethod def count_feed_subscribers(self, feed_id=None, user_id=None, verbose=True): SUBSCRIBER_EXPIRE = datetime.datetime.now() - datetime.timedelta(days=settings.SUBSCRIBER_EXPIRE) r = redis.Redis(connection_pool=settings.REDIS_FEED_SUB_POOL) entire_feed_counted = False - + if verbose: feed = Feed.get_by_id(feed_id) - logging.debug(" ---> [%-30s] ~SN~FBCounting subscribers for feed:~SB~FM%s~SN~FB user:~SB~FM%s" % (feed.log_title[:30], feed_id, user_id)) - + logging.debug( + " ---> [%-30s] ~SN~FBCounting subscribers for feed:~SB~FM%s~SN~FB user:~SB~FM%s" + % (feed.log_title[:30], feed_id, user_id) + ) + if feed_id: feed_ids = [feed_id] elif user_id: - feed_ids = [us['feed_id'] for us in UserSubscription.objects.filter(user=user_id, active=True).values('feed_id')] + feed_ids = [ + us["feed_id"] + for us in UserSubscription.objects.filter(user=user_id, active=True).values("feed_id") + ] else: assert False, "feed_id or user_id required" if feed_id and not user_id: entire_feed_counted = True - + for feed_id in feed_ids: total = 0 premium = 0 @@ -1118,20 +1238,26 @@ def count_feed_subscribers(self, feed_id=None, user_id=None, verbose=True): active_premium = 0 archive = 0 pro = 0 - key = 's:%s' % feed_id - premium_key = 'sp:%s' % feed_id - archive_key = 'sarchive:%s' % feed_id - pro_key = 'spro:%s' % feed_id - + key = "s:%s" % feed_id + premium_key = "sp:%s" % feed_id + archive_key = "sarchive:%s" % feed_id + pro_key = "spro:%s" % feed_id + if user_id: - active = UserSubscription.objects.get(feed_id=feed_id, user_id=user_id).only('active').active + active = UserSubscription.objects.get(feed_id=feed_id, user_id=user_id).only("active").active user_active_feeds = dict([(user_id, active)]) else: - user_active_feeds = dict([(us.user_id, us.active) - for us in UserSubscription.objects.filter(feed_id=feed_id).only('user', 'active')]) - profiles = Profile.objects.filter(user_id__in=list(user_active_feeds.keys())).values('user_id', 'last_seen_on', 'is_premium', 'is_archive', 'is_pro') + user_active_feeds = dict( + [ + (us.user_id, us.active) + for us in UserSubscription.objects.filter(feed_id=feed_id).only("user", "active") + ] + ) + profiles = Profile.objects.filter(user_id__in=list(user_active_feeds.keys())).values( + "user_id", "last_seen_on", "is_premium", "is_archive", "is_pro" + ) feed = Feed.get_by_id(feed_id) - + if entire_feed_counted: pipeline = r.pipeline() pipeline.delete(key) @@ -1139,150 +1265,167 @@ def count_feed_subscribers(self, feed_id=None, user_id=None, verbose=True): pipeline.delete(archive_key) pipeline.delete(pro_key) pipeline.execute() - + for profiles_group in chunks(profiles, 20): pipeline = r.pipeline() for profile in profiles_group: - last_seen_on = int(profile['last_seen_on'].strftime('%s')) - muted_feed = not bool(user_active_feeds[profile['user_id']]) + last_seen_on = int(profile["last_seen_on"].strftime("%s")) + muted_feed = not bool(user_active_feeds[profile["user_id"]]) if muted_feed: last_seen_on = 0 - pipeline.zadd(key, { profile['user_id']: last_seen_on }) + pipeline.zadd(key, 
{profile["user_id"]: last_seen_on}) total += 1 - if profile['is_premium']: - pipeline.zadd(premium_key, { profile['user_id']: last_seen_on }) + if profile["is_premium"]: + pipeline.zadd(premium_key, {profile["user_id"]: last_seen_on}) premium += 1 else: - pipeline.zrem(premium_key, profile['user_id']) - if profile['is_archive']: - pipeline.zadd(archive_key, { profile['user_id']: last_seen_on }) + pipeline.zrem(premium_key, profile["user_id"]) + if profile["is_archive"]: + pipeline.zadd(archive_key, {profile["user_id"]: last_seen_on}) archive += 1 else: - pipeline.zrem(archive_key, profile['user_id']) - if profile['is_pro']: - pipeline.zadd(pro_key, { profile['user_id']: last_seen_on }) + pipeline.zrem(archive_key, profile["user_id"]) + if profile["is_pro"]: + pipeline.zadd(pro_key, {profile["user_id"]: last_seen_on}) pro += 1 else: - pipeline.zrem(pro_key, profile['user_id']) - if profile['last_seen_on'] > SUBSCRIBER_EXPIRE and not muted_feed: + pipeline.zrem(pro_key, profile["user_id"]) + if profile["last_seen_on"] > SUBSCRIBER_EXPIRE and not muted_feed: active += 1 - if profile['is_premium']: + if profile["is_premium"]: active_premium += 1 - + pipeline.execute() - + if entire_feed_counted: - now = int(datetime.datetime.now().strftime('%s')) - r.zadd(key, { -1: now }) - r.expire(key, settings.SUBSCRIBER_EXPIRE*24*60*60) + now = int(datetime.datetime.now().strftime("%s")) + r.zadd(key, {-1: now}) + r.expire(key, settings.SUBSCRIBER_EXPIRE * 24 * 60 * 60) r.zadd(premium_key, {-1: now}) - r.expire(premium_key, settings.SUBSCRIBER_EXPIRE*24*60*60) + r.expire(premium_key, settings.SUBSCRIBER_EXPIRE * 24 * 60 * 60) r.zadd(archive_key, {-1: now}) - r.expire(archive_key, settings.SUBSCRIBER_EXPIRE*24*60*60) + r.expire(archive_key, settings.SUBSCRIBER_EXPIRE * 24 * 60 * 60) r.zadd(pro_key, {-1: now}) - r.expire(pro_key, settings.SUBSCRIBER_EXPIRE*24*60*60) - - logging.info(" ---> [%-30s] ~SN~FBCounting subscribers, storing in ~SBredis~SN: ~FMt:~SB~FM%s~SN a:~SB%s~SN p:~SB%s~SN ap:~SB%s~SN archive:~SB%s~SN pro:~SB%s" % - (feed.log_title[:30], total, active, premium, active_premium, archive, pro)) + r.expire(pro_key, settings.SUBSCRIBER_EXPIRE * 24 * 60 * 60) + + logging.info( + " ---> [%-30s] ~SN~FBCounting subscribers, storing in ~SBredis~SN: ~FMt:~SB~FM%s~SN a:~SB%s~SN p:~SB%s~SN ap:~SB%s~SN archive:~SB%s~SN pro:~SB%s" + % (feed.log_title[:30], total, active, premium, active_premium, archive, pro) + ) @classmethod def count_all_feed_subscribers_for_user(self, user): r = redis.Redis(connection_pool=settings.REDIS_FEED_SUB_POOL) if not isinstance(user, User): user = User.objects.get(pk=user) - - active_feed_ids = [us['feed_id'] for us in UserSubscription.objects.filter(user=user.pk, active=True).values('feed_id')] - muted_feed_ids = [us['feed_id'] for us in UserSubscription.objects.filter(user=user.pk, active=False).values('feed_id')] - logging.user(user, "~SN~FBRefreshing user last_login_on for ~SB%s~SN/~SB%s subscriptions~SN" % - (len(active_feed_ids), len(muted_feed_ids))) + + active_feed_ids = [ + us["feed_id"] + for us in UserSubscription.objects.filter(user=user.pk, active=True).values("feed_id") + ] + muted_feed_ids = [ + us["feed_id"] + for us in UserSubscription.objects.filter(user=user.pk, active=False).values("feed_id") + ] + logging.user( + user, + "~SN~FBRefreshing user last_login_on for ~SB%s~SN/~SB%s subscriptions~SN" + % (len(active_feed_ids), len(muted_feed_ids)), + ) for feed_ids in [active_feed_ids, muted_feed_ids]: for feeds_group in chunks(feed_ids, 20): pipeline = r.pipeline() 
for feed_id in feeds_group: - key = 's:%s' % feed_id - premium_key = 'sp:%s' % feed_id - archive_key = 'sarchive:%s' % feed_id - pro_key = 'spro:%s' % feed_id + key = "s:%s" % feed_id + premium_key = "sp:%s" % feed_id + archive_key = "sarchive:%s" % feed_id + pro_key = "spro:%s" % feed_id - last_seen_on = int(user.profile.last_seen_on.strftime('%s')) + last_seen_on = int(user.profile.last_seen_on.strftime("%s")) if feed_ids is muted_feed_ids: last_seen_on = 0 - pipeline.zadd(key, { user.pk: last_seen_on }) + pipeline.zadd(key, {user.pk: last_seen_on}) if user.profile.is_premium: - pipeline.zadd(premium_key, { user.pk: last_seen_on }) + pipeline.zadd(premium_key, {user.pk: last_seen_on}) else: pipeline.zrem(premium_key, user.pk) if user.profile.is_archive: - pipeline.zadd(archive_key, { user.pk: last_seen_on }) + pipeline.zadd(archive_key, {user.pk: last_seen_on}) else: pipeline.zrem(archive_key, user.pk) if user.profile.is_pro: - pipeline.zadd(pro_key, { user.pk: last_seen_on }) + pipeline.zadd(pro_key, {user.pk: last_seen_on}) else: pipeline.zrem(pro_key, user.pk) pipeline.execute() - + def send_new_user_email(self): if not self.user.email or not self.send_emails: return - - user = self.user - text = render_to_string('mail/email_new_account.txt', locals()) - html = render_to_string('mail/email_new_account.xhtml', locals()) + + user = self.user + text = render_to_string("mail/email_new_account.txt", locals()) + html = render_to_string("mail/email_new_account.xhtml", locals()) subject = "Welcome to NewsBlur, %s" % (self.user.username) - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - + logging.user(self.user, "~BB~FM~SBSending email for new user: %s" % self.user.email) - + def send_opml_export_email(self, reason=None, force=False): if not self.user.email: return - - emails_sent = MSentEmail.objects.filter(receiver_user_id=self.user.pk, - email_type='opml_export') + + emails_sent = MSentEmail.objects.filter(receiver_user_id=self.user.pk, email_type="opml_export") day_ago = datetime.datetime.now() - datetime.timedelta(days=1) for email in emails_sent: if email.date_sent > day_ago and not force: logging.user(self.user, "~SN~FMNot sending opml export email, already sent today.") return - MSentEmail.record(receiver_user_id=self.user.pk, email_type='opml_export') - + MSentEmail.record(receiver_user_id=self.user.pk, email_type="opml_export") + exporter = OPMLExporter(self.user) - opml = exporter.process() + opml = exporter.process() params = { - 'feed_count': UserSubscription.objects.filter(user=self.user).count(), - 'reason': reason, + "feed_count": UserSubscription.objects.filter(user=self.user).count(), + "reason": reason, } - user = self.user - text = render_to_string('mail/email_opml_export.txt', params) - html = render_to_string('mail/email_opml_export.xhtml', params) + user = self.user + text = render_to_string("mail/email_opml_export.txt", params) + html = render_to_string("mail/email_opml_export.xhtml", params) subject = "Backup OPML file of your NewsBlur sites" - filename= 'NewsBlur Subscriptions - %s.xml' % datetime.datetime.now().strftime('%Y-%m-%d') - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + filename = 
"NewsBlur Subscriptions - %s.xml" % datetime.datetime.now().strftime("%Y-%m-%d") + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") - msg.attach(filename, opml, 'text/xml') + msg.attach(filename, opml, "text/xml") msg.send() - + from apps.social.models import MActivity + MActivity.new_opml_export(user_id=self.user.pk, count=exporter.feed_count, automated=True) - + logging.user(self.user, "~BB~FM~SBSending OPML backup email to: %s" % self.user.email) - + def send_first_share_to_blurblog_email(self, force=False): from apps.social.models import MSocialProfile, MSharedStory - + if not self.user.email: return - - params = dict(receiver_user_id=self.user.pk, email_type='first_share') + + params = dict(receiver_user_id=self.user.pk, email_type="first_share") try: MSentEmail.objects.get(**params) if not force: @@ -1290,30 +1433,33 @@ def send_first_share_to_blurblog_email(self, force=False): return except MSentEmail.DoesNotExist: MSentEmail.objects.create(**params) - + social_profile = MSocialProfile.objects.get(user_id=self.user.pk) params = { - 'shared_stories': MSharedStory.objects.filter(user_id=self.user.pk).count(), - 'blurblog_url': social_profile.blurblog_url, - 'blurblog_rss': social_profile.blurblog_rss + "shared_stories": MSharedStory.objects.filter(user_id=self.user.pk).count(), + "blurblog_url": social_profile.blurblog_url, + "blurblog_rss": social_profile.blurblog_rss, } - user = self.user - text = render_to_string('mail/email_first_share_to_blurblog.txt', params) - html = render_to_string('mail/email_first_share_to_blurblog.xhtml', params) + user = self.user + text = render_to_string("mail/email_first_share_to_blurblog.txt", params) + html = render_to_string("mail/email_first_share_to_blurblog.xhtml", params) subject = "Your shared stories on NewsBlur are available on your Blurblog" - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - + logging.user(self.user, "~BB~FM~SBSending first share to blurblog email to: %s" % self.user.email) - - def send_new_premium_email(self, force=False): + + def send_new_premium_email(self, force=False): if not self.user.email or not self.send_emails: return - - params = dict(receiver_user_id=self.user.pk, email_type='new_premium') + + params = dict(receiver_user_id=self.user.pk, email_type="new_premium") try: MSentEmail.objects.get(**params) if not force: @@ -1322,52 +1468,66 @@ def send_new_premium_email(self, force=False): except MSentEmail.DoesNotExist: MSentEmail.objects.create(**params) - user = self.user - text = render_to_string('mail/email_new_premium.txt', locals()) - html = render_to_string('mail/email_new_premium.xhtml', locals()) + user = self.user + text = render_to_string("mail/email_new_premium.txt", locals()) + html = render_to_string("mail/email_new_premium.xhtml", locals()) subject = "Thank you for subscribing to NewsBlur Premium!" 
- msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - + logging.user(self.user, "~BB~FM~SBSending email for new premium: %s" % self.user.email) - + def send_new_premium_archive_email(self, total_story_count, pre_archive_count, force=False): if not self.user.email: return - - params = dict(receiver_user_id=self.user.pk, email_type='new_premium_archive') + + params = dict(receiver_user_id=self.user.pk, email_type="new_premium_archive") try: MSentEmail.objects.get(**params) if not force: # Return if email already sent - logging.user(self.user, "~BB~FMNot ~SBSending email for new premium archive: %s (%s to %s stories)" % (self.user.email, pre_archive_count, total_story_count)) + logging.user( + self.user, + "~BB~FMNot ~SBSending email for new premium archive: %s (%s to %s stories)" + % (self.user.email, pre_archive_count, total_story_count), + ) return except MSentEmail.DoesNotExist: MSentEmail.objects.create(**params) feed_count = UserSubscription.objects.filter(user=self.user).count() - user = self.user - text = render_to_string('mail/email_new_premium_archive.txt', locals()) - html = render_to_string('mail/email_new_premium_archive.xhtml', locals()) + user = self.user + text = render_to_string("mail/email_new_premium_archive.txt", locals()) + html = render_to_string("mail/email_new_premium_archive.xhtml", locals()) if total_story_count > pre_archive_count: subject = f"NewsBlur archive backfill is complete: from {pre_archive_count:,} to {total_story_count:,} stories" else: subject = f"NewsBlur archive backfill is complete: {total_story_count:,} stories" - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - - logging.user(self.user, "~BB~FM~SBSending email for new premium archive: %s (%s to %s stories)" % (self.user.email, pre_archive_count, total_story_count)) - + + logging.user( + self.user, + "~BB~FM~SBSending email for new premium archive: %s (%s to %s stories)" + % (self.user.email, pre_archive_count, total_story_count), + ) + def send_new_premium_pro_email(self, force=False): if not self.user.email or not self.send_emails: return - - params = dict(receiver_user_id=self.user.pk, email_type='new_premium_pro') + + params = dict(receiver_user_id=self.user.pk, email_type="new_premium_pro") try: MSentEmail.objects.get(**params) if not force: @@ -1376,45 +1536,51 @@ def send_new_premium_pro_email(self, force=False): except MSentEmail.DoesNotExist: MSentEmail.objects.create(**params) - user = self.user - text = render_to_string('mail/email_new_premium_pro.txt', locals()) - html = render_to_string('mail/email_new_premium_pro.xhtml', locals()) + user = self.user + text = render_to_string("mail/email_new_premium_pro.txt", locals()) + html = render_to_string("mail/email_new_premium_pro.xhtml", locals()) subject = "Thanks for subscribing to NewsBlur Premium Pro!" 
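
# --- Aside (reviewer sketch, not part of this patch) ----------------------
# The send_new_premium_* methods all wrap their work in the same
# idempotency dance: look up an MSentEmail record, bail if one exists and
# force is off, otherwise record it and send. The shape of that guard,
# factored out; `sent_log` is any class with Django-style objects.get /
# objects.create semantics, standing in for the Mongo-backed MSentEmail.

def send_once(sent_log, user_id, email_type, send, force=False):
    params = dict(receiver_user_id=user_id, email_type=email_type)
    try:
        sent_log.objects.get(**params)
        if not force:
            return False  # already sent; skip unless forced
    except sent_log.DoesNotExist:
        sent_log.objects.create(**params)
    send()  # zero-argument callable that actually emails the user
    return True
# ---------------------------------------------------------------------------
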
- msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - + logging.user(self.user, "~BB~FM~SBSending email for new premium pro: %s" % self.user.email) - + def send_forgot_password_email(self, email=None): if not self.user.email and not email: print("Please provide an email address.") return - + if not self.user.email and email: self.user.email = email self.user.save() - - user = self.user - text = render_to_string('mail/email_forgot_password.txt', locals()) - html = render_to_string('mail/email_forgot_password.xhtml', locals()) + + user = self.user + text = render_to_string("mail/email_forgot_password.txt", locals()) + html = render_to_string("mail/email_forgot_password.xhtml", locals()) subject = "Forgot your password on NewsBlur?" - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - + logging.user(self.user, "~BB~FM~SBSending email for forgotten password: %s" % self.user.email) - + def send_new_user_queue_email(self, force=False): if not self.user.email: print("Please provide an email address.") return - - params = dict(receiver_user_id=self.user.pk, email_type='new_user_queue') + + params = dict(receiver_user_id=self.user.pk, email_type="new_user_queue") try: MSentEmail.objects.get(**params) if not force: @@ -1423,238 +1589,306 @@ def send_new_user_queue_email(self, force=False): except MSentEmail.DoesNotExist: MSentEmail.objects.create(**params) - user = self.user - text = render_to_string('mail/email_new_user_queue.txt', locals()) - html = render_to_string('mail/email_new_user_queue.xhtml', locals()) + user = self.user + text = render_to_string("mail/email_new_user_queue.txt", locals()) + html = render_to_string("mail/email_new_user_queue.xhtml", locals()) subject = "Your free account is now ready to go on NewsBlur" - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - + logging.user(self.user, "~BB~FM~SBSending email for new user queue: %s" % self.user.email) - + def send_upload_opml_finished_email(self, feed_count): if not self.user.email: print("Please provide an email address.") return - - user = self.user - text = render_to_string('mail/email_upload_opml_finished.txt', locals()) - html = render_to_string('mail/email_upload_opml_finished.xhtml', locals()) + + user = self.user + text = render_to_string("mail/email_upload_opml_finished.txt", locals()) + html = render_to_string("mail/email_upload_opml_finished.xhtml", locals()) subject = "Your OPML upload is complete. Get going with NewsBlur!" 
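
# --- Aside (sketch, not part of this patch) --------------------------------
# Several mailers here pass locals() as the template context, which hands
# every local variable in the method to the template. The explicit-dict
# style used by send_opml_export_email earlier in this file is narrower and
# easier to audit; the same rendering step rewritten that way:

from django.template.loader import render_to_string

def render_upload_finished(user, feed_count):
    context = {"user": user, "feed_count": feed_count}  # only what the template needs
    text = render_to_string("mail/email_upload_opml_finished.txt", context)
    html = render_to_string("mail/email_upload_opml_finished.xhtml", context)
    return text, html
# ---------------------------------------------------------------------------
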
- msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - + logging.user(self.user, "~BB~FM~SBSending email for OPML upload: %s" % self.user.email) - + def send_import_reader_finished_email(self, feed_count): if not self.user.email: print("Please provide an email address.") return - - user = self.user - text = render_to_string('mail/email_import_reader_finished.txt', locals()) - html = render_to_string('mail/email_import_reader_finished.xhtml', locals()) + + user = self.user + text = render_to_string("mail/email_import_reader_finished.txt", locals()) + html = render_to_string("mail/email_import_reader_finished.xhtml", locals()) subject = "Your Google Reader import is complete. Get going with NewsBlur!" - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - + logging.user(self.user, "~BB~FM~SBSending email for Google Reader import: %s" % self.user.email) - + def send_import_reader_starred_finished_email(self, feed_count, starred_count): if not self.user.email: print("Please provide an email address.") return - - user = self.user - text = render_to_string('mail/email_import_reader_starred_finished.txt', locals()) - html = render_to_string('mail/email_import_reader_starred_finished.xhtml', locals()) + + user = self.user + text = render_to_string("mail/email_import_reader_starred_finished.txt", locals()) + html = render_to_string("mail/email_import_reader_starred_finished.xhtml", locals()) subject = "Your Google Reader starred stories import is complete. Get going with NewsBlur!" 
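# The launch emails below compute months_ago as delta.days / 30, which is
# float ("true") division on Python 3 and so interpolates values like
# "3.3333" into log lines, while the expiry emails round() it. A standalone
# snippet showing the difference (illustrative, not code from this patch):
import datetime

last_seen_on = datetime.datetime.now() - datetime.timedelta(days=100)
delta = datetime.datetime.now() - last_seen_on
print(delta.days / 30)         # 3.3333333333333335 (true division)
print(round(delta.days / 30))  # 3
print(delta.days // 30)        # 3 (floor division, always an int)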
- msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - - logging.user(self.user, "~BB~FM~SBSending email for Google Reader starred stories import: %s" % self.user.email) - + + logging.user( + self.user, "~BB~FM~SBSending email for Google Reader starred stories import: %s" % self.user.email + ) + def send_launch_social_email(self, force=False): if not self.user.email or not self.send_emails: - logging.user(self.user, "~FM~SB~FRNot~FM sending launch social email for user, %s: %s" % (self.user.email and 'opt-out: ' or 'blank', self.user.email)) + logging.user( + self.user, + "~FM~SB~FRNot~FM sending launch social email for user, %s: %s" + % (self.user.email and "opt-out: " or "blank", self.user.email), + ) return - - params = dict(receiver_user_id=self.user.pk, email_type='launch_social') + + params = dict(receiver_user_id=self.user.pk, email_type="launch_social") try: MSentEmail.objects.get(**params) if not force: # Return if email already sent - logging.user(self.user, "~FM~SB~FRNot~FM sending launch social email for user, sent already: %s" % self.user.email) + logging.user( + self.user, + "~FM~SB~FRNot~FM sending launch social email for user, sent already: %s" + % self.user.email, + ) return except MSentEmail.DoesNotExist: MSentEmail.objects.create(**params) - - delta = datetime.datetime.now() - self.last_seen_on + + delta = datetime.datetime.now() - self.last_seen_on months_ago = delta.days / 30 - user = self.user - data = dict(user=user, months_ago=months_ago) - text = render_to_string('mail/email_launch_social.txt', data) - html = render_to_string('mail/email_launch_social.xhtml', data) + user = self.user + data = dict(user=user, months_ago=months_ago) + text = render_to_string("mail/email_launch_social.txt", data) + html = render_to_string("mail/email_launch_social.xhtml", data) subject = "NewsBlur is now a social news reader" - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - - logging.user(self.user, "~BB~FM~SBSending launch social email for user: %s months, %s" % (months_ago, self.user.email)) - + + logging.user( + self.user, + "~BB~FM~SBSending launch social email for user: %s months, %s" % (months_ago, self.user.email), + ) + def send_launch_turntouch_email(self, force=False): if not self.user.email or not self.send_emails: - logging.user(self.user, "~FM~SB~FRNot~FM sending launch TT email for user, %s: %s" % (self.user.email and 'opt-out: ' or 'blank', self.user.email)) + logging.user( + self.user, + "~FM~SB~FRNot~FM sending launch TT email for user, %s: %s" + % (self.user.email and "opt-out: " or "blank", self.user.email), + ) return - - params = dict(receiver_user_id=self.user.pk, email_type='launch_turntouch') + + params = dict(receiver_user_id=self.user.pk, email_type="launch_turntouch") try: MSentEmail.objects.get(**params) if not force: # Return if email already sent - logging.user(self.user, "~FM~SB~FRNot~FM sending launch social email for user, sent already: %s" % self.user.email) + logging.user( + self.user, 
+ "~FM~SB~FRNot~FM sending launch social email for user, sent already: %s" + % self.user.email, + ) return except MSentEmail.DoesNotExist: MSentEmail.objects.create(**params) - - delta = datetime.datetime.now() - self.last_seen_on + + delta = datetime.datetime.now() - self.last_seen_on months_ago = delta.days / 30 - user = self.user - data = dict(user=user, months_ago=months_ago) - text = render_to_string('mail/email_launch_turntouch.txt', data) - html = render_to_string('mail/email_launch_turntouch.xhtml', data) + user = self.user + data = dict(user=user, months_ago=months_ago) + text = render_to_string("mail/email_launch_turntouch.txt", data) + html = render_to_string("mail/email_launch_turntouch.xhtml", data) subject = "Introducing Turn Touch for NewsBlur" - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - - logging.user(self.user, "~BB~FM~SBSending launch TT email for user: %s months, %s" % (months_ago, self.user.email)) + + logging.user( + self.user, + "~BB~FM~SBSending launch TT email for user: %s months, %s" % (months_ago, self.user.email), + ) def send_launch_turntouch_end_email(self, force=False): if not self.user.email or not self.send_emails: - logging.user(self.user, "~FM~SB~FRNot~FM sending launch TT end email for user, %s: %s" % (self.user.email and 'opt-out: ' or 'blank', self.user.email)) + logging.user( + self.user, + "~FM~SB~FRNot~FM sending launch TT end email for user, %s: %s" + % (self.user.email and "opt-out: " or "blank", self.user.email), + ) return - - params = dict(receiver_user_id=self.user.pk, email_type='launch_turntouch_end') + + params = dict(receiver_user_id=self.user.pk, email_type="launch_turntouch_end") try: MSentEmail.objects.get(**params) if not force: # Return if email already sent - logging.user(self.user, "~FM~SB~FRNot~FM sending launch TT end email for user, sent already: %s" % self.user.email) + logging.user( + self.user, + "~FM~SB~FRNot~FM sending launch TT end email for user, sent already: %s" + % self.user.email, + ) return except MSentEmail.DoesNotExist: MSentEmail.objects.create(**params) - - delta = datetime.datetime.now() - self.last_seen_on + + delta = datetime.datetime.now() - self.last_seen_on months_ago = delta.days / 30 - user = self.user - data = dict(user=user, months_ago=months_ago) - text = render_to_string('mail/email_launch_turntouch_end.txt', data) - html = render_to_string('mail/email_launch_turntouch_end.xhtml', data) + user = self.user + data = dict(user=user, months_ago=months_ago) + text = render_to_string("mail/email_launch_turntouch_end.txt", data) + html = render_to_string("mail/email_launch_turntouch_end.xhtml", data) subject = "Last day to back Turn Touch: NewsBlur's beautiful remote" - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - - logging.user(self.user, "~BB~FM~SBSending launch TT end email for user: %s months, %s" % (months_ago, self.user.email)) - + + logging.user( + self.user, + "~BB~FM~SBSending launch TT end email for user: %s months, %s" % 
(months_ago, self.user.email), + ) + def grace_period_email_sent(self, force=False): - emails_sent = MSentEmail.objects.filter(receiver_user_id=self.user.pk, - email_type='premium_expire_grace') + emails_sent = MSentEmail.objects.filter( + receiver_user_id=self.user.pk, email_type="premium_expire_grace" + ) day_ago = datetime.datetime.now() - datetime.timedelta(days=360) for email in emails_sent: if email.date_sent > day_ago and not force: logging.user(self.user, "~SN~FMNot sending premium expire grace email, already sent before.") return True - + def send_premium_expire_grace_period_email(self, force=False): if not self.user.email: - logging.user(self.user, "~FM~SB~FRNot~FM~SN sending premium expire grace for user: %s" % (self.user)) + logging.user( + self.user, "~FM~SB~FRNot~FM~SN sending premium expire grace for user: %s" % (self.user) + ) return if self.grace_period_email_sent(force=force): return - + if self.premium_expire and self.premium_expire < datetime.datetime.now(): self.premium_expire = datetime.datetime.now() self.save() - - delta = datetime.datetime.now() - self.last_seen_on + + delta = datetime.datetime.now() - self.last_seen_on months_ago = round(delta.days / 30) - user = self.user - data = dict(user=user, months_ago=months_ago) - text = render_to_string('mail/email_premium_expire_grace.txt', data) - html = render_to_string('mail/email_premium_expire_grace.xhtml', data) + user = self.user + data = dict(user=user, months_ago=months_ago) + text = render_to_string("mail/email_premium_expire_grace.txt", data) + html = render_to_string("mail/email_premium_expire_grace.xhtml", data) subject = "Your premium account on NewsBlur has one more month!" - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - - MSentEmail.record(receiver_user_id=self.user.pk, email_type='premium_expire_grace') - logging.user(self.user, "~BB~FM~SBSending premium expire grace email for user: %s months, %s" % (months_ago, self.user.email)) - + + MSentEmail.record(receiver_user_id=self.user.pk, email_type="premium_expire_grace") + logging.user( + self.user, + "~BB~FM~SBSending premium expire grace email for user: %s months, %s" + % (months_ago, self.user.email), + ) + def send_premium_expire_email(self, force=False): if not self.user.email: logging.user(self.user, "~FM~SB~FRNot~FM sending premium expire for user: %s" % (self.user)) return - emails_sent = MSentEmail.objects.filter(receiver_user_id=self.user.pk, - email_type='premium_expire') + emails_sent = MSentEmail.objects.filter(receiver_user_id=self.user.pk, email_type="premium_expire") day_ago = datetime.datetime.now() - datetime.timedelta(days=360) for email in emails_sent: if email.date_sent > day_ago and not force: logging.user(self.user, "~FM~SBNot sending premium expire email, already sent before.") return - - delta = datetime.datetime.now() - self.last_seen_on + + delta = datetime.datetime.now() - self.last_seen_on months_ago = round(delta.days / 30) - user = self.user - data = dict(user=user, months_ago=months_ago) - text = render_to_string('mail/email_premium_expire.txt', data) - html = render_to_string('mail/email_premium_expire.xhtml', data) + user = self.user + data = dict(user=user, months_ago=months_ago) + text = 
render_to_string("mail/email_premium_expire.txt", data) + html = render_to_string("mail/email_premium_expire.xhtml", data) subject = "Your premium account on NewsBlur has expired" - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - - MSentEmail.record(receiver_user_id=self.user.pk, email_type='premium_expire') - logging.user(self.user, "~BB~FM~SBSending premium expire email for user: %s months, %s" % (months_ago, self.user.email)) - + + MSentEmail.record(receiver_user_id=self.user.pk, email_type="premium_expire") + logging.user( + self.user, + "~BB~FM~SBSending premium expire email for user: %s months, %s" % (months_ago, self.user.email), + ) + def autologin_url(self, next=None): - return reverse('autologin', kwargs={ - 'username': self.user.username, - 'secret': self.secret_token - }) + ('?' + next + '=1' if next else '') - - + return reverse("autologin", kwargs={"username": self.user.username, "secret": self.secret_token}) + ( + "?" + next + "=1" if next else "" + ) + @classmethod def doublecheck_paypal_payments(cls, days=14): - payments = PayPalIPN.objects.filter(txn_type='subscr_payment', - updated_at__gte=datetime.datetime.now() - - datetime.timedelta(days) - ).order_by('-created_at') + payments = PayPalIPN.objects.filter( + txn_type="subscr_payment", updated_at__gte=datetime.datetime.now() - datetime.timedelta(days) + ).order_by("-created_at") for payment in payments: try: profile = Profile.objects.get(user__username=payment.custom) @@ -1662,10 +1896,10 @@ def doublecheck_paypal_payments(cls, days=14): logging.debug(" ---> ~FRCouldn't find user: ~SB~FC%s" % payment.custom) continue profile.setup_premium_history() - + class StripeIds(models.Model): - user = models.ForeignKey(User, related_name='stripe_ids', on_delete=models.CASCADE, null=True) + user = models.ForeignKey(User, related_name="stripe_ids", on_delete=models.CASCADE, null=True) stripe_id = models.CharField(max_length=24, blank=True, null=True) def __str__(self): @@ -1673,18 +1907,20 @@ def __str__(self): class PaypalIds(models.Model): - user = models.ForeignKey(User, related_name='paypal_ids', on_delete=models.CASCADE, null=True) + user = models.ForeignKey(User, related_name="paypal_ids", on_delete=models.CASCADE, null=True) paypal_sub_id = models.CharField(max_length=24, blank=True, null=True) def __str__(self): return "%s: %s" % (self.user.username, self.paypal_sub_id) - + def create_profile(sender, instance, created, **kwargs): if created: Profile.objects.create(user=instance) else: Profile.objects.get_or_create(user=instance) + + post_save.connect(create_profile, sender=User) @@ -1702,7 +1938,7 @@ def paypal_signup(sender, **kwargs): user = User.objects.get(email__iexact=ipn_obj.payer_email) except User.DoesNotExist: pass - + if not user and ipn_obj.custom: try: user = User.objects.get(pk=ipn_obj.custom) @@ -1716,9 +1952,10 @@ def paypal_signup(sender, **kwargs): pass if not user: - logging.debug(" ---> Paypal subscription not found during paypal_signup: %s/%s" % ( - ipn_obj.payer_email, - ipn_obj.custom)) + logging.debug( + " ---> Paypal subscription not found during paypal_signup: %s/%s" + % (ipn_obj.payer_email, ipn_obj.custom) + ) return {"code": -1, "message": "User doesn't exist."} logging.user(user, "~BC~SB~FBPaypal subscription 
signup") @@ -1733,8 +1970,11 @@ def paypal_signup(sender, **kwargs): # user.profile.cancel_premium_paypal(second_most_recent_only=True) # assert False, "Shouldn't be here anymore as the new Paypal REST API uses webhooks" + + valid_ipn_received.connect(paypal_signup) + def paypal_payment_history_sync(sender, **kwargs): ipn_obj = sender try: @@ -1743,9 +1983,10 @@ def paypal_payment_history_sync(sender, **kwargs): try: user = User.objects.get(email__iexact=ipn_obj.payer_email) except User.DoesNotExist: - logging.debug(" ---> Paypal subscription not found during flagging: %s/%s" % ( - ipn_obj.payer_email, - ipn_obj.custom)) + logging.debug( + " ---> Paypal subscription not found during flagging: %s/%s" + % (ipn_obj.payer_email, ipn_obj.custom) + ) return {"code": -1, "message": "User doesn't exist."} logging.user(user, "~BC~SB~FBPaypal subscription payment") @@ -1753,8 +1994,11 @@ def paypal_payment_history_sync(sender, **kwargs): user.profile.setup_premium_history() except: return {"code": -1, "message": "User doesn't exist."} + + valid_ipn_received.connect(paypal_payment_history_sync) + def paypal_payment_was_flagged(sender, **kwargs): ipn_obj = sender try: @@ -1763,27 +2007,31 @@ def paypal_payment_was_flagged(sender, **kwargs): try: user = User.objects.get(email__iexact=ipn_obj.payer_email) except User.DoesNotExist: - logging.debug(" ---> Paypal subscription not found during flagging: %s/%s" % ( - ipn_obj.payer_email, - ipn_obj.custom)) + logging.debug( + " ---> Paypal subscription not found during flagging: %s/%s" + % (ipn_obj.payer_email, ipn_obj.custom) + ) return {"code": -1, "message": "User doesn't exist."} - + try: user.profile.setup_premium_history() logging.user(user, "~BC~SB~FBPaypal subscription payment flagged") except: return {"code": -1, "message": "User doesn't exist."} + + invalid_ipn_received.connect(paypal_payment_was_flagged) + def stripe_checkout_session_completed(sender, full_json, **kwargs): - newsblur_user_id = full_json['data']['object']['metadata']['newsblur_user_id'] - stripe_id = full_json['data']['object']['customer'] + newsblur_user_id = full_json["data"]["object"]["metadata"]["newsblur_user_id"] + stripe_id = full_json["data"]["object"]["customer"] profile = None try: profile = Profile.objects.get(stripe_id=stripe_id) except Profile.DoesNotExist: pass - + if not profile: try: profile = User.objects.get(pk=int(newsblur_user_id)).profile @@ -1791,46 +2039,56 @@ def stripe_checkout_session_completed(sender, full_json, **kwargs): profile.save() except User.DoesNotExist: pass - + if profile: logging.user(profile.user, "~BC~SB~FBStripe checkout subscription signup") profile.retrieve_stripe_ids() else: logging.user(profile.user, "~BR~SB~FRCouldn't find Stripe user: ~FW%s" % full_json) return {"code": -1, "message": "User doesn't exist."} + + zebra_webhook_checkout_session_completed.connect(stripe_checkout_session_completed) + def stripe_signup(sender, full_json, **kwargs): - stripe_id = full_json['data']['object']['customer'] - plan_id = full_json['data']['object']['plan']['id'] + stripe_id = full_json["data"]["object"]["customer"] + plan_id = full_json["data"]["object"]["plan"]["id"] try: profile = Profile.objects.get(stripe_id=stripe_id) logging.user(profile.user, "~BC~SB~FBStripe subscription signup") - if plan_id == Profile.plan_to_stripe_price('premium'): + if plan_id == Profile.plan_to_stripe_price("premium"): profile.activate_premium() - elif plan_id == Profile.plan_to_stripe_price('archive'): + elif plan_id == Profile.plan_to_stripe_price("archive"): 
profile.activate_archive() - elif plan_id == Profile.plan_to_stripe_price('pro'): + elif plan_id == Profile.plan_to_stripe_price("pro"): profile.activate_pro() profile.cancel_premium_paypal() profile.retrieve_stripe_ids() except Profile.DoesNotExist: return {"code": -1, "message": "User doesn't exist."} + + zebra_webhook_customer_subscription_created.connect(stripe_signup) + def stripe_subscription_updated(sender, full_json, **kwargs): - stripe_id = full_json['data']['object']['customer'] - plan_id = full_json['data']['object']['plan']['id'] + stripe_id = full_json["data"]["object"]["customer"] + plan_id = full_json["data"]["object"]["plan"]["id"] try: profile = Profile.objects.get(stripe_id=stripe_id) - active = not full_json['data']['object']['cancel_at'] and full_json['data']['object']['plan']['active'] - logging.user(profile.user, "~BC~SB~FBStripe subscription updated: %s" % "active" if active else "cancelled") + active = ( + not full_json["data"]["object"]["cancel_at"] and full_json["data"]["object"]["plan"]["active"] + ) + logging.user( + profile.user, "~BC~SB~FBStripe subscription updated: %s" % "active" if active else "cancelled" + ) if active: - if plan_id == Profile.plan_to_stripe_price('premium'): + if plan_id == Profile.plan_to_stripe_price("premium"): profile.activate_premium() - elif plan_id == Profile.plan_to_stripe_price('archive'): + elif plan_id == Profile.plan_to_stripe_price("archive"): profile.activate_archive() - elif plan_id == Profile.plan_to_stripe_price('pro'): + elif plan_id == Profile.plan_to_stripe_price("pro"): profile.activate_pro() profile.cancel_premium_paypal() profile.retrieve_stripe_ids() @@ -1838,19 +2096,25 @@ def stripe_subscription_updated(sender, full_json, **kwargs): profile.setup_premium_history() except Profile.DoesNotExist: return {"code": -1, "message": "User doesn't exist."} + + zebra_webhook_customer_subscription_updated.connect(stripe_subscription_updated) + def stripe_payment_history_sync(sender, full_json, **kwargs): - stripe_id = full_json['data']['object']['customer'] + stripe_id = full_json["data"]["object"]["customer"] try: profile = Profile.objects.get(stripe_id=stripe_id) logging.user(profile.user, "~BC~SB~FBStripe subscription payment") profile.setup_premium_history() except Profile.DoesNotExist: - return {"code": -1, "message": "User doesn't exist."} + return {"code": -1, "message": "User doesn't exist."} + + zebra_webhook_charge_succeeded.connect(stripe_payment_history_sync) zebra_webhook_charge_refunded.connect(stripe_payment_history_sync) + def change_password(user, old_password, new_password, only_check=False): user_db = authenticate(username=user.username, password=old_password) if user_db is None: @@ -1860,7 +2124,7 @@ def change_password(user, old_password, new_password, only_check=False): user.save() if user_db is None: user_db = authenticate(username=user.username, password=user.username) - + if not user_db: return -1 else: @@ -1869,48 +2133,53 @@ def change_password(user, old_password, new_password, only_check=False): user_db.save() return 1 + def blank_authenticate(username, password=""): try: user = User.objects.get(username__iexact=username) except User.DoesNotExist: return - + if user.password == "!": return user - - algorithm, salt, hash = user.password.split('$', 2) - encoded_blank = hashlib.sha1((salt + password).encode(encoding='utf-8')).hexdigest() + + algorithm, salt, hash = user.password.split("$", 2) + encoded_blank = hashlib.sha1((salt + password).encode(encoding="utf-8")).hexdigest() encoded_username = 
authenticate(username=username, password=username) if encoded_blank == hash or encoded_username == user: return user + # Unfinished class MEmailUnsubscribe(mongo.Document): user_id = mongo.IntField() email_type = mongo.StringField() date = mongo.DateTimeField(default=datetime.datetime.now) - - EMAIL_TYPE_FOLLOWS = 'follows' - EMAIL_TYPE_REPLIES = 'replies' - EMAIL_TYOE_PRODUCT = 'product' - + + EMAIL_TYPE_FOLLOWS = "follows" + EMAIL_TYPE_REPLIES = "replies" + EMAIL_TYOE_PRODUCT = "product" + meta = { - 'collection': 'email_unsubscribes', - 'allow_inheritance': False, - 'indexes': ['user_id', - {'fields': ['user_id', 'email_type'], - 'unique': True, - }], + "collection": "email_unsubscribes", + "allow_inheritance": False, + "indexes": [ + "user_id", + { + "fields": ["user_id", "email_type"], + "unique": True, + }, + ], } - + def __str__(self): return "%s unsubscribed from %s on %s" % (self.user_id, self.email_type, self.date) - + @classmethod def user(cls, user_id): unsubs = cls.objects(user_id=user_id) return unsubs - + @classmethod def unsubscribe(cls, user_id, email_type): cls.objects.create() @@ -1921,13 +2190,13 @@ class MSentEmail(mongo.Document): receiver_user_id = mongo.IntField() email_type = mongo.StringField() date_sent = mongo.DateTimeField(default=datetime.datetime.now) - + meta = { - 'collection': 'sent_emails', - 'allow_inheritance': False, - 'indexes': ['sending_user_id', 'receiver_user_id', 'email_type'], + "collection": "sent_emails", + "allow_inheritance": False, + "indexes": ["sending_user_id", "receiver_user_id", "email_type"], } - + def __str__(self): sender_user = self.sending_user_id if sender_user: @@ -1935,61 +2204,83 @@ def __str__(self): receiver_user = self.receiver_user_id if receiver_user: receiver_user = User.objects.get(pk=self.receiver_user_id) - return "%s sent %s email to %s %s" % (sender_user, self.email_type, receiver_user, receiver_user.profile if receiver_user else receiver_user) - + return "%s sent %s email to %s %s" % ( + sender_user, + self.email_type, + receiver_user, + receiver_user.profile if receiver_user else receiver_user, + ) + @classmethod def record(cls, email_type, receiver_user_id, sending_user_id=None): - cls.objects.create(email_type=email_type, - receiver_user_id=receiver_user_id, - sending_user_id=sending_user_id) + cls.objects.create( + email_type=email_type, receiver_user_id=receiver_user_id, sending_user_id=sending_user_id + ) + class PaymentHistory(models.Model): - user = models.ForeignKey(User, related_name='payments', on_delete=models.CASCADE) + user = models.ForeignKey(User, related_name="payments", on_delete=models.CASCADE) payment_date = models.DateTimeField() payment_amount = models.IntegerField() payment_provider = models.CharField(max_length=20) payment_identifier = models.CharField(max_length=100, null=True) refunded = models.BooleanField(blank=True, null=True) - + def __str__(self): - return "[%s] $%s/%s %s" % (self.payment_date.strftime("%Y-%m-%d"), self.payment_amount, - self.payment_provider, "" if self.refunded else "") + return "[%s] $%s/%s %s" % ( + self.payment_date.strftime("%Y-%m-%d"), + self.payment_amount, + self.payment_provider, + "" if self.refunded else "", + ) + class Meta: - ordering = ['-payment_date'] - + ordering = ["-payment_date"] + def canonical(self): return { - 'payment_date': self.payment_date.strftime('%Y-%m-%d'), - 'payment_amount': self.payment_amount, - 'payment_provider': self.payment_provider, - 'refunded': self.refunded, + "payment_date": self.payment_date.strftime("%Y-%m-%d"), + 
"payment_amount": self.payment_amount, + "payment_provider": self.payment_provider, + "refunded": self.refunded, } - + @classmethod def report(cls, months=26): output = "" - + def _counter(start_date, end_date, output, payments=None): if not payments: - payments = PaymentHistory.objects.filter(payment_date__gte=start_date, payment_date__lte=end_date) - payments = payments.aggregate(avg=Avg('payment_amount'), - sum=Sum('payment_amount'), - count=Count('user')) + payments = PaymentHistory.objects.filter( + payment_date__gte=start_date, payment_date__lte=end_date + ) + payments = payments.aggregate( + avg=Avg("payment_amount"), sum=Sum("payment_amount"), count=Count("user") + ) output += "%s-%02d-%02d - %s-%02d-%02d:\t$%.2f\t$%-6s\t%-4s\n" % ( - start_date.year, start_date.month, start_date.day, - end_date.year, end_date.month, end_date.day, - round(payments['avg'] if payments['avg'] else 0, 2), payments['sum'] if payments['sum'] else 0, payments['count']) - + start_date.year, + start_date.month, + start_date.day, + end_date.year, + end_date.month, + end_date.day, + round(payments["avg"] if payments["avg"] else 0, 2), + payments["sum"] if payments["sum"] else 0, + payments["count"], + ) + return payments, output output += "\nMonthly Totals:\n" for m in reversed(list(range(months))): now = datetime.datetime.now() - start_date = datetime.datetime(now.year, now.month, 1) - dateutil.relativedelta.relativedelta(months=m) + start_date = datetime.datetime(now.year, now.month, 1) - dateutil.relativedelta.relativedelta( + months=m + ) end_time = start_date + datetime.timedelta(days=31) end_date = datetime.datetime(end_time.year, end_time.month, 1) - datetime.timedelta(seconds=1) total, output = _counter(start_date, end_date, output) - total = total['sum'] + total = total["sum"] output += "\nMTD Totals:\n" years = datetime.datetime.now().year - 2009 @@ -2001,18 +2292,21 @@ def _counter(start_date, end_date, output, payments=None): this_mtd_count = 0 for y in reversed(list(range(years))): now = datetime.datetime.now() - start_date = datetime.datetime(now.year, now.month, 1) - dateutil.relativedelta.relativedelta(years=y) + start_date = datetime.datetime(now.year, now.month, 1) - dateutil.relativedelta.relativedelta( + years=y + ) end_date = now - dateutil.relativedelta.relativedelta(years=y) - if end_date > now: end_date = now + if end_date > now: + end_date = now count, output = _counter(start_date, end_date, output) if end_date.year != now.year: - last_mtd_avg = count['avg'] or 0 - last_mtd_sum = count['sum'] or 0 - last_mtd_count = count['count'] + last_mtd_avg = count["avg"] or 0 + last_mtd_sum = count["sum"] or 0 + last_mtd_count = count["count"] else: - this_mtd_avg = count['avg'] or 0 - this_mtd_sum = count['sum'] or 0 - this_mtd_count = count['count'] + this_mtd_avg = count["avg"] or 0 + this_mtd_sum = count["sum"] or 0 + this_mtd_count = count["count"] output += "\nCurrent Month Totals:\n" years = datetime.datetime.now().year - 2009 @@ -2021,19 +2315,25 @@ def _counter(start_date, end_date, output, payments=None): last_month_count = 0 for y in reversed(list(range(years))): now = datetime.datetime.now() - start_date = datetime.datetime(now.year, now.month, 1) - dateutil.relativedelta.relativedelta(years=y) + start_date = datetime.datetime(now.year, now.month, 1) - dateutil.relativedelta.relativedelta( + years=y + ) end_time = start_date + datetime.timedelta(days=31) end_date = datetime.datetime(end_time.year, end_time.month, 1) - datetime.timedelta(seconds=1) if end_date > now: - payments = 
{'avg': this_mtd_avg / (max(1, last_mtd_avg) / float(max(1, last_month_avg))), - 'sum': int(round(this_mtd_sum / (max(1, last_mtd_sum) / float(max(1, last_month_sum))))), - 'count': int(round(this_mtd_count / (max(1, last_mtd_count) / float(max(1, last_month_count)))))} + payments = { + "avg": this_mtd_avg / (max(1, last_mtd_avg) / float(max(1, last_month_avg))), + "sum": int(round(this_mtd_sum / (max(1, last_mtd_sum) / float(max(1, last_month_sum))))), + "count": int( + round(this_mtd_count / (max(1, last_mtd_count) / float(max(1, last_month_count)))) + ), + } _, output = _counter(start_date, end_date, output, payments=payments) else: count, output = _counter(start_date, end_date, output) - last_month_avg = count['avg'] - last_month_sum = count['sum'] - last_month_count = count['count'] + last_month_avg = count["avg"] + last_month_sum = count["sum"] + last_month_count = count["count"] output += "\nYTD Totals:\n" years = datetime.datetime.now().year - 2009 @@ -2049,13 +2349,13 @@ def _counter(start_date, end_date, output, payments=None): end_date = now - dateutil.relativedelta.relativedelta(years=y) count, output = _counter(start_date, end_date, output) if end_date.year != now.year: - last_ytd_avg = count['avg'] or 0 - last_ytd_sum = count['sum'] or 0 - last_ytd_count = count['count'] + last_ytd_avg = count["avg"] or 0 + last_ytd_sum = count["sum"] or 0 + last_ytd_count = count["count"] else: - this_ytd_avg = count['avg'] or 0 - this_ytd_sum = count['sum'] or 0 - this_ytd_count = count['count'] + this_ytd_avg = count["avg"] or 0 + this_ytd_sum = count["sum"] or 0 + this_ytd_count = count["count"] output += "\nYearly Totals:\n" years = datetime.datetime.now().year - 2009 @@ -2066,26 +2366,33 @@ def _counter(start_date, end_date, output, payments=None): for y in reversed(list(range(years))): now = datetime.datetime.now() start_date = datetime.datetime(now.year, 1, 1) - dateutil.relativedelta.relativedelta(years=y) - end_date = datetime.datetime(now.year, 1, 1) - dateutil.relativedelta.relativedelta(years=y-1) - datetime.timedelta(seconds=1) + end_date = ( + datetime.datetime(now.year, 1, 1) + - dateutil.relativedelta.relativedelta(years=y - 1) + - datetime.timedelta(seconds=1) + ) if end_date > now: - payments = {'avg': this_ytd_avg / (max(1, last_ytd_avg) / float(max(1, last_year_avg))), - 'sum': int(round(this_ytd_sum / (max(1, last_ytd_sum) / float(max(1, last_year_sum))))), - 'count': int(round(this_ytd_count / (max(1, last_ytd_count) / float(max(1, last_year_count)))))} + payments = { + "avg": this_ytd_avg / (max(1, last_ytd_avg) / float(max(1, last_year_avg))), + "sum": int(round(this_ytd_sum / (max(1, last_ytd_sum) / float(max(1, last_year_sum))))), + "count": int( + round(this_ytd_count / (max(1, last_ytd_count) / float(max(1, last_year_count)))) + ), + } count, output = _counter(start_date, end_date, output, payments=payments) - annual = count['sum'] + annual = count["sum"] else: count, output = _counter(start_date, end_date, output) - last_year_avg = count['avg'] or 0 - last_year_sum = count['sum'] or 0 - last_year_count = count['count'] - - - total = cls.objects.all().aggregate(sum=Sum('payment_amount')) - output += "\nTotal: $%s\n" % total['sum'] - + last_year_avg = count["avg"] or 0 + last_year_sum = count["sum"] or 0 + last_year_count = count["count"] + + total = cls.objects.all().aggregate(sum=Sum("payment_amount")) + output += "\nTotal: $%s\n" % total["sum"] + print(output) - - return {'annual': annual, 'output': output} + + return {"annual": annual, "output": output} class 
MGiftCode(mongo.Document): @@ -2095,108 +2402,124 @@ class MGiftCode(mongo.Document): duration_days = mongo.IntField() payment_amount = mongo.IntField() created_date = mongo.DateTimeField(default=datetime.datetime.now) - + meta = { - 'collection': 'gift_codes', - 'allow_inheritance': False, - 'indexes': ['gifting_user_id', 'receiving_user_id', 'created_date'], + "collection": "gift_codes", + "allow_inheritance": False, + "indexes": ["gifting_user_id", "receiving_user_id", "created_date"], } - + def __str__(self): - return "%s gifted %s on %s: %s (redeemed %s times)" % (self.gifting_user_id, self.receiving_user_id, self.created_date, self.gift_code, self.redeemed) - + return "%s gifted %s on %s: %s (redeemed %s times)" % ( + self.gifting_user_id, + self.receiving_user_id, + self.created_date, + self.gift_code, + self.redeemed, + ) + @property def redeemed(self): redeemed_code = MRedeemedCode.objects.filter(gift_code=self.gift_code) return len(redeemed_code) - + @staticmethod def create_code(gift_code=None): u = str(uuid.uuid4()) code = u[:8] + u[9:13] if gift_code: - code = gift_code + code[len(gift_code):] + code = gift_code + code[len(gift_code) :] return code - + @classmethod def add(cls, gift_code=None, duration=0, gifting_user_id=None, receiving_user_id=None, payment=0): - return cls.objects.create(gift_code=cls.create_code(gift_code), - gifting_user_id=gifting_user_id, - receiving_user_id=receiving_user_id, - duration_days=duration, - payment_amount=payment) + return cls.objects.create( + gift_code=cls.create_code(gift_code), + gifting_user_id=gifting_user_id, + receiving_user_id=receiving_user_id, + duration_days=duration, + payment_amount=payment, + ) class MRedeemedCode(mongo.Document): user_id = mongo.IntField() gift_code = mongo.StringField() redeemed_date = mongo.DateTimeField(default=datetime.datetime.now) - + meta = { - 'collection': 'redeemed_codes', - 'allow_inheritance': False, - 'indexes': ['user_id', 'gift_code', 'redeemed_date'], + "collection": "redeemed_codes", + "allow_inheritance": False, + "indexes": ["user_id", "gift_code", "redeemed_date"], } - + def __str__(self): return "%s redeemed %s on %s" % (self.user_id, self.gift_code, self.redeemed_date) - + @classmethod def record(cls, user_id, gift_code): - cls.objects.create(user_id=user_id, - gift_code=gift_code) + cls.objects.create(user_id=user_id, gift_code=gift_code) + @classmethod def redeem(cls, user, gift_code): newsblur_gift_code = MGiftCode.objects.filter(gift_code__iexact=gift_code) if newsblur_gift_code: newsblur_gift_code = newsblur_gift_code[0] - PaymentHistory.objects.create(user=user, - payment_date=datetime.datetime.now(), - payment_amount=newsblur_gift_code.payment_amount, - payment_provider='newsblur-gift') - + PaymentHistory.objects.create( + user=user, + payment_date=datetime.datetime.now(), + payment_amount=newsblur_gift_code.payment_amount, + payment_provider="newsblur-gift", + ) + else: # Thinkup / Good Web Bundle - PaymentHistory.objects.create(user=user, - payment_date=datetime.datetime.now(), - payment_amount=12, - payment_provider='good-web-bundle') + PaymentHistory.objects.create( + user=user, + payment_date=datetime.datetime.now(), + payment_amount=12, + payment_provider="good-web-bundle", + ) cls.record(user.pk, gift_code) user.profile.activate_premium() logging.user(user, "~FG~BBRedeeming gift code: %s~FW" % gift_code) - + class MCustomStyling(mongo.Document): user_id = mongo.IntField(unique=True) custom_css = mongo.StringField() custom_js = mongo.StringField() updated_date = 
mongo.DateTimeField(default=datetime.datetime.now) - + meta = { - 'collection': 'custom_styling', - 'allow_inheritance': False, - 'indexes': ['user_id'], + "collection": "custom_styling", + "allow_inheritance": False, + "indexes": ["user_id"], } - + def __str__(self): - return "%s custom style %s/%s %s" % (self.user_id, len(self.custom_css) if self.custom_css else "-", - len(self.custom_js) if self.custom_js else "-", self.updated_date) - + return "%s custom style %s/%s %s" % ( + self.user_id, + len(self.custom_css) if self.custom_css else "-", + len(self.custom_js) if self.custom_js else "-", + self.updated_date, + ) + def canonical(self): return { - 'css': self.custom_css, - 'js': self.custom_js, + "css": self.custom_css, + "js": self.custom_js, } - + @classmethod def get_user(cls, user_id): try: styling = cls.objects.get(user_id=user_id) except cls.DoesNotExist: return None - + return styling - + @classmethod def save_user(cls, user_id, css, js): styling = cls.get_user(user_id) @@ -2220,13 +2543,16 @@ class MDashboardRiver(mongo.Document): river_order = mongo.IntField() meta = { - 'collection': 'dashboard_river', - 'allow_inheritance': False, - 'indexes': ['user_id', - {'fields': ['user_id', 'river_id', 'river_side', 'river_order'], - 'unique': True, - }], - 'ordering': ['river_order'] + "collection": "dashboard_river", + "allow_inheritance": False, + "indexes": [ + "user_id", + { + "fields": ["user_id", "river_id", "river_side", "river_order"], + "unique": True, + }, + ], + "ordering": ["river_order"], } def __str__(self): @@ -2235,14 +2561,14 @@ def __str__(self): except User.DoesNotExist: u = "" return f"{u} ({self.river_side}/{self.river_order}): {self.river_id}" - + def canonical(self): return { - 'river_id': self.river_id, - 'river_side': self.river_side, - 'river_order': self.river_order, + "river_id": self.river_id, + "river_side": self.river_side, + "river_order": self.river_order, } - + @classmethod def get_user_rivers(cls, user_id): return cls.objects(user_id=user_id) @@ -2270,59 +2596,67 @@ def save_user(cls, user_id, river_id, river_side, river_order): river = None if not river: - river = cls.objects.create(user_id=user_id, river_id=river_id, - river_side=river_side, river_order=river_order) + river = cls.objects.create( + user_id=user_id, river_id=river_id, river_side=river_side, river_order=river_order + ) river.river_id = river_id river.river_side = river_side river.river_order = river_order river.save() + class RNewUserQueue: - KEY = "new_user_queue" - + @classmethod def activate_next(cls): count = cls.user_count() if not count: return - + user_id = cls.pop_user() try: user = User.objects.get(pk=user_id) except User.DoesNotExist: - logging.debug("~FRCan't activate free account, can't find user ~SB%s~SN. ~FB%s still in queue." % (user_id, count-1)) + logging.debug( + "~FRCan't activate free account, can't find user ~SB%s~SN. ~FB%s still in queue." + % (user_id, count - 1) + ) return - - logging.user(user, "~FBActivating free account (%s / %s). %s still in queue." % (user.email, user.profile.last_seen_ip, (count-1))) + + logging.user( + user, + "~FBActivating free account (%s / %s). %s still in queue." 
+ % (user.email, user.profile.last_seen_ip, (count - 1)), + ) user.profile.activate_free() - + @classmethod def activate_all(cls): count = cls.user_count() if not count: logging.debug("~FBNo users to activate, sleeping...") return - + for i in range(count): cls.activate_next() - + @classmethod def add_user(cls, user_id): r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) now = time.time() - - r.zadd(cls.KEY, { user_id: now }) - + + r.zadd(cls.KEY, {user_id: now}) + @classmethod def user_count(cls): r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) count = r.zcard(cls.KEY) return count - + @classmethod def user_position(cls, user_id): r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) @@ -2331,7 +2665,7 @@ def user_position(cls, user_id): return -1 if position >= 0: return position + 1 - + @classmethod def pop_user(cls): r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) @@ -2339,4 +2673,3 @@ def pop_user(cls): r.zrem(cls.KEY, user) return user - diff --git a/apps/profile/tasks.py b/apps/profile/tasks.py index 9b8f7fd4b8..2179f01258 100644 --- a/apps/profile/tasks.py +++ b/apps/profile/tasks.py @@ -5,16 +5,19 @@ from apps.reader.models import UserSubscription, UserSubscriptionFolders from apps.social.models import MSocialServices, MActivity, MInteraction + @app.task(name="email-new-user") def EmailNewUser(user_id): user_profile = Profile.objects.get(user__pk=user_id) user_profile.send_new_user_email() + @app.task(name="email-new-premium") def EmailNewPremium(user_id): user_profile = Profile.objects.get(user__pk=user_id) user_profile.send_new_premium_email() + @app.task() def FetchArchiveFeedsForUser(user_id): # subs = UserSubscription.objects.filter(user=user_id) @@ -23,33 +26,39 @@ def FetchArchiveFeedsForUser(user_id): UserSubscription.fetch_archive_feeds_for_user(user_id) + @app.task() def FetchArchiveFeedsChunk(user_id, feed_ids): # logging.debug(" ---> Fetching archive stories: %s for %s" % (feed_ids, user_id)) UserSubscription.fetch_archive_feeds_chunk(user_id, feed_ids) + @app.task() def FinishFetchArchiveFeeds(results, user_id, start_time, starting_story_count): # logging.debug(" ---> Fetching archive stories finished for %s" % (user_id)) - ending_story_count, pre_archive_count = UserSubscription.finish_fetch_archive_feeds(user_id, start_time, starting_story_count) + ending_story_count, pre_archive_count = UserSubscription.finish_fetch_archive_feeds( + user_id, start_time, starting_story_count + ) user_profile = Profile.objects.get(user__pk=user_id) user_profile.send_new_premium_archive_email(ending_story_count, pre_archive_count) + @app.task(name="email-new-premium-pro") def EmailNewPremiumPro(user_id): user_profile = Profile.objects.get(user__pk=user_id) user_profile.send_new_premium_pro_email() + @app.task(name="premium-expire") def PremiumExpire(**kwargs): # Get expired but grace period users two_days_ago = datetime.datetime.now() - datetime.timedelta(days=2) thirty_days_ago = datetime.datetime.now() - datetime.timedelta(days=30) - expired_profiles = Profile.objects.filter(is_premium=True, - premium_expire__lte=two_days_ago, - premium_expire__gt=thirty_days_ago) + expired_profiles = Profile.objects.filter( + is_premium=True, premium_expire__lte=two_days_ago, premium_expire__gt=thirty_days_ago + ) logging.debug(" ---> %s users have expired premiums, emailing grace..." 
% expired_profiles.count()) for profile in expired_profiles: if profile.grace_period_email_sent(): @@ -57,21 +66,24 @@ def PremiumExpire(**kwargs): profile.setup_premium_history() if profile.premium_expire < two_days_ago: profile.send_premium_expire_grace_period_email() - + # Get fully expired users - expired_profiles = Profile.objects.filter(is_premium=True, - premium_expire__lte=thirty_days_ago) - logging.debug(" ---> %s users have expired premiums, deactivating and emailing..." % expired_profiles.count()) + expired_profiles = Profile.objects.filter(is_premium=True, premium_expire__lte=thirty_days_ago) + logging.debug( + " ---> %s users have expired premiums, deactivating and emailing..." % expired_profiles.count() + ) for profile in expired_profiles: profile.setup_premium_history() if profile.premium_expire < thirty_days_ago: profile.send_premium_expire_email() profile.deactivate_premium() + @app.task(name="activate-next-new-user") def ActivateNextNewUser(): RNewUserQueue.activate_next() + @app.task(name="cleanup-user") def CleanupUser(user_id): UserSubscription.trim_user_read_stories(user_id) @@ -82,7 +94,7 @@ def CleanupUser(user_id): UserSubscriptionFolders.add_missing_feeds_for_user(user_id) UserSubscriptionFolders.compact_for_user(user_id) UserSubscription.refresh_stale_feeds(user_id) - + try: ss = MSocialServices.objects.get(user_id=user_id) except MSocialServices.DoesNotExist: @@ -90,14 +102,14 @@ def CleanupUser(user_id): return ss.sync_twitter_photo() + @app.task(name="clean-spam") def CleanSpam(): logging.debug(" ---> Finding spammers...") Profile.clear_dead_spammers(confirm=True) + @app.task(name="reimport-stripe-history") def ReimportStripeHistory(): logging.debug(" ---> Reimporting Stripe history...") Profile.reimport_stripe_history(limit=10, days=1) - - diff --git a/apps/profile/test_profile.py b/apps/profile/test_profile.py index d35afcf3b4..15a1a9dd2e 100644 --- a/apps/profile/test_profile.py +++ b/apps/profile/test_profile.py @@ -5,33 +5,36 @@ from django.conf import settings from mongoengine.connection import connect, disconnect + class Test_Profile(TestCase): fixtures = [ - 'subscriptions.json', - 'rss_feeds.json', + "subscriptions.json", + "rss_feeds.json", ] - + def setUp(self): disconnect() - settings.MONGODB = connect('test_newsblur') - self.client = Client(HTTP_USER_AGENT='Mozilla/5.0') + settings.MONGODB = connect("test_newsblur") + self.client = Client(HTTP_USER_AGENT="Mozilla/5.0") def tearDown(self): - settings.MONGODB.drop_database('test_newsblur') - + settings.MONGODB.drop_database("test_newsblur") + def test_create_account(self): - resp = self.client.get(reverse('load-feeds')) + resp = self.client.get(reverse("load-feeds")) response = json.decode(resp.content) - self.assertEquals(response['authenticated'], False) + self.assertEquals(response["authenticated"], False) - response = self.client.post(reverse('welcome-signup'), { - 'signup-username': 'test', - 'signup-password': 'password', - 'signup-email': 'test@newsblur.com', - }) + response = self.client.post( + reverse("welcome-signup"), + { + "signup-username": "test", + "signup-password": "password", + "signup-email": "test@newsblur.com", + }, + ) self.assertEquals(response.status_code, 302) - resp = self.client.get(reverse('load-feeds')) + resp = self.client.get(reverse("load-feeds")) response = json.decode(resp.content) - self.assertEquals(response['authenticated'], True) - \ No newline at end of file + self.assertEquals(response["authenticated"], True) diff --git a/apps/profile/urls.py 
b/apps/profile/urls.py index cc264e4e41..57f7cc5190 100644 --- a/apps/profile/urls.py +++ b/apps/profile/urls.py @@ -2,41 +2,45 @@ from apps.profile import views urlpatterns = [ - url(r'^get_preferences?/?', views.get_preference), - url(r'^set_preference/?', views.set_preference), - url(r'^set_account_settings/?', views.set_account_settings), - url(r'^get_view_setting/?', views.get_view_setting), - url(r'^set_view_setting/?', views.set_view_setting), - url(r'^clear_view_setting/?', views.clear_view_setting), - url(r'^set_collapsed_folders/?', views.set_collapsed_folders), - url(r'^paypal_form/?', views.paypal_form), - url(r'^paypal_return/?', views.paypal_return, name='paypal-return'), - url(r'^paypal_archive_return/?', views.paypal_archive_return, name='paypal-archive-return'), - url(r'^stripe_return/?', views.paypal_return, name='stripe-return'), - url(r'^switch_stripe_subscription/?', views.switch_stripe_subscription, name='switch-stripe-subscription'), - url(r'^switch_paypal_subscription/?', views.switch_paypal_subscription, name='switch-paypal-subscription'), - url(r'^is_premium/?', views.profile_is_premium, name='profile-is-premium'), - url(r'^is_premium_archive/?', views.profile_is_premium_archive, name='profile-is-premium-archive'), + url(r"^get_preferences?/?", views.get_preference), + url(r"^set_preference/?", views.set_preference), + url(r"^set_account_settings/?", views.set_account_settings), + url(r"^get_view_setting/?", views.get_view_setting), + url(r"^set_view_setting/?", views.set_view_setting), + url(r"^clear_view_setting/?", views.clear_view_setting), + url(r"^set_collapsed_folders/?", views.set_collapsed_folders), + url(r"^paypal_form/?", views.paypal_form), + url(r"^paypal_return/?", views.paypal_return, name="paypal-return"), + url(r"^paypal_archive_return/?", views.paypal_archive_return, name="paypal-archive-return"), + url(r"^stripe_return/?", views.paypal_return, name="stripe-return"), + url( + r"^switch_stripe_subscription/?", views.switch_stripe_subscription, name="switch-stripe-subscription" + ), + url( + r"^switch_paypal_subscription/?", views.switch_paypal_subscription, name="switch-paypal-subscription" + ), + url(r"^is_premium/?", views.profile_is_premium, name="profile-is-premium"), + url(r"^is_premium_archive/?", views.profile_is_premium_archive, name="profile-is-premium-archive"), # url(r'^paypal_ipn/?', include('paypal.standard.ipn.urls'), name='paypal-ipn'), - url(r'^paypal_ipn/?', views.paypal_ipn, name='paypal-ipn'), - url(r'^paypal_webhooks/?', views.paypal_webhooks, name='paypal-webhooks'), - url(r'^stripe_form/?', views.stripe_form, name='stripe-form'), - url(r'^stripe_checkout/?', views.stripe_checkout, name='stripe-checkout'), - url(r'^activities/?', views.load_activities, name='profile-activities'), - url(r'^payment_history/?', views.payment_history, name='profile-payment-history'), - url(r'^cancel_premium/?', views.cancel_premium, name='profile-cancel-premium'), - url(r'^refund_premium/?', views.refund_premium, name='profile-refund-premium'), - url(r'^never_expire_premium/?', views.never_expire_premium, name='profile-never-expire-premium'), - url(r'^upgrade_premium/?', views.upgrade_premium, name='profile-upgrade-premium'), - url(r'^save_ios_receipt/?', views.save_ios_receipt, name='save-ios-receipt'), - url(r'^save_android_receipt/?', views.save_android_receipt, name='save-android-receipt'), - url(r'^update_payment_history/?', views.update_payment_history, name='profile-update-payment-history'), - url(r'^delete_account/?', views.delete_account, 
name='profile-delete-account'), - url(r'^forgot_password_return/?', views.forgot_password_return, name='profile-forgot-password-return'), - url(r'^forgot_password/?', views.forgot_password, name='profile-forgot-password'), - url(r'^delete_starred_stories/?', views.delete_starred_stories, name='profile-delete-starred-stories'), - url(r'^delete_all_sites/?', views.delete_all_sites, name='profile-delete-all-sites'), - url(r'^email_optout/?', views.email_optout, name='profile-email-optout'), - url(r'^ios_subscription_status/?', views.ios_subscription_status, name='profile-ios-subscription-status'), - url(r'debug/?', views.trigger_error, name='trigger-error'), + url(r"^paypal_ipn/?", views.paypal_ipn, name="paypal-ipn"), + url(r"^paypal_webhooks/?", views.paypal_webhooks, name="paypal-webhooks"), + url(r"^stripe_form/?", views.stripe_form, name="stripe-form"), + url(r"^stripe_checkout/?", views.stripe_checkout, name="stripe-checkout"), + url(r"^activities/?", views.load_activities, name="profile-activities"), + url(r"^payment_history/?", views.payment_history, name="profile-payment-history"), + url(r"^cancel_premium/?", views.cancel_premium, name="profile-cancel-premium"), + url(r"^refund_premium/?", views.refund_premium, name="profile-refund-premium"), + url(r"^never_expire_premium/?", views.never_expire_premium, name="profile-never-expire-premium"), + url(r"^upgrade_premium/?", views.upgrade_premium, name="profile-upgrade-premium"), + url(r"^save_ios_receipt/?", views.save_ios_receipt, name="save-ios-receipt"), + url(r"^save_android_receipt/?", views.save_android_receipt, name="save-android-receipt"), + url(r"^update_payment_history/?", views.update_payment_history, name="profile-update-payment-history"), + url(r"^delete_account/?", views.delete_account, name="profile-delete-account"), + url(r"^forgot_password_return/?", views.forgot_password_return, name="profile-forgot-password-return"), + url(r"^forgot_password/?", views.forgot_password, name="profile-forgot-password"), + url(r"^delete_starred_stories/?", views.delete_starred_stories, name="profile-delete-starred-stories"), + url(r"^delete_all_sites/?", views.delete_all_sites, name="profile-delete-all-sites"), + url(r"^email_optout/?", views.email_optout, name="profile-email-optout"), + url(r"^ios_subscription_status/?", views.ios_subscription_status, name="profile-ios-subscription-status"), + url(r"debug/?", views.trigger_error, name="trigger-error"), ] diff --git a/apps/profile/views.py b/apps/profile/views.py index 4700aa6594..34faa2eb7c 100644 --- a/apps/profile/views.py +++ b/apps/profile/views.py @@ -36,66 +36,84 @@ from paypal.standard.forms import PayPalPaymentsForm from paypal.standard.ipn.views import ipn as paypal_standard_ipn -INTEGER_FIELD_PREFS = ('feed_pane_size', 'days_of_unread') -SINGLE_FIELD_PREFS = ('timezone','hide_mobile','send_emails', - 'hide_getting_started', 'has_setup_feeds', 'has_found_friends', - 'has_trained_intelligence') -SPECIAL_PREFERENCES = ('old_password', 'new_password', 'autofollow_friends', 'dashboard_date',) +INTEGER_FIELD_PREFS = ("feed_pane_size", "days_of_unread") +SINGLE_FIELD_PREFS = ( + "timezone", + "hide_mobile", + "send_emails", + "hide_getting_started", + "has_setup_feeds", + "has_found_friends", + "has_trained_intelligence", +) +SPECIAL_PREFERENCES = ( + "old_password", + "new_password", + "autofollow_friends", + "dashboard_date", +) + @ajax_login_required @require_POST @json.json_view def set_preference(request): code = 1 - message = '' + message = "" new_preferences = request.POST - + 
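    # Routing note for the loop below: known scalar settings live directly
    # on Profile (SINGLE_FIELD_PREFS, and INTEGER_FIELD_PREFS cast to int,
    # with a days_of_unread change triggering an unread recalc on all subs),
    # a few names have side effects (SPECIAL_PREFERENCES), and anything else
    # is persisted in the JSON-encoded profile.preferences blob.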
preferences = json.decode(request.user.profile.preferences) for preference_name, preference_value in list(new_preferences.items()): - if preference_value in ['true','false']: preference_value = True if preference_value == 'true' else False + if preference_value in ["true", "false"]: + preference_value = True if preference_value == "true" else False if preference_name in SINGLE_FIELD_PREFS: setattr(request.user.profile, preference_name, preference_value) elif preference_name in INTEGER_FIELD_PREFS: - if preference_name == "days_of_unread" and int(preference_value) != request.user.profile.days_of_unread: + if ( + preference_name == "days_of_unread" + and int(preference_value) != request.user.profile.days_of_unread + ): UserSubscription.all_subs_needs_unread_recalc(request.user.pk) setattr(request.user.profile, preference_name, int(preference_value)) if preference_name in preferences: del preferences[preference_name] elif preference_name in SPECIAL_PREFERENCES: - if preference_name == 'autofollow_friends': + if preference_name == "autofollow_friends": social_services = MSocialServices.get_user(request.user.pk) social_services.autofollow = preference_value social_services.save() - elif preference_name == 'dashboard_date': + elif preference_name == "dashboard_date": request.user.profile.dashboard_date = datetime.datetime.utcnow() else: if preference_value in ["true", "false"]: preference_value = True if preference_value == "true" else False preferences[preference_name] = preference_value - if preference_name == 'intro_page': + if preference_name == "intro_page": logging.user(request, "~FBAdvancing intro to page ~FM~SB%s" % preference_value) - + request.user.profile.preferences = json.encode(preferences) request.user.profile.save() - + logging.user(request, "~FMSaving preference: %s" % new_preferences) response = dict(code=code, message=message, new_preferences=new_preferences) return response + @ajax_login_required @json.json_view def get_preference(request): code = 1 - preference_name = request.POST.get('preference') + preference_name = request.POST.get("preference") preferences = json.decode(request.user.profile.preferences) - + payload = preferences if preference_name: payload = preferences.get(preference_name) - + response = dict(code=code, payload=payload) return response + @csrf_protect def login(request): form = LoginForm() @@ -103,74 +121,80 @@ def login(request): if request.method == "POST": form = LoginForm(data=request.POST) if form.is_valid(): - login_user(request, form.get_user(), backend='django.contrib.auth.backends.ModelBackend') + login_user(request, form.get_user(), backend="django.contrib.auth.backends.ModelBackend") logging.user(form.get_user(), "~FG~BBOAuth Login~FW") - return HttpResponseRedirect(request.POST['next'] or reverse('index')) + return HttpResponseRedirect(request.POST["next"] or reverse("index")) + + return render( + request, + "accounts/login.html", + {"form": form, "next": request.POST.get("next", "") or request.GET.get("next", "")}, + ) + - return render(request, 'accounts/login.html', { - 'form': form, - 'next': request.POST.get('next', "") or request.GET.get('next', "") - }) - @csrf_exempt def signup(request): form = SignupForm(prefix="signup") - recaptcha = request.POST.get('g-recaptcha-response', None) + recaptcha = request.POST.get("g-recaptcha-response", None) recaptcha_error = None - + if settings.ENFORCE_SIGNUP_CAPTCHA: if not recaptcha: - recaptcha_error = "Please hit the \"I'm not a robot\" button." 
+ recaptcha_error = 'Please hit the "I\'m not a robot" button.' else: - response = requests.post('https://www.google.com/recaptcha/api/siteverify', { - 'secret': settings.RECAPTCHA_SECRET_KEY, - 'response': recaptcha, - }) + response = requests.post( + "https://www.google.com/recaptcha/api/siteverify", + { + "secret": settings.RECAPTCHA_SECRET_KEY, + "response": recaptcha, + }, + ) result = response.json() - if not result['success']: - recaptcha_error = "Really, please hit the \"I'm not a robot\" button." + if not result["success"]: + recaptcha_error = 'Really, please hit the "I\'m not a robot" button.' if request.method == "POST": form = SignupForm(data=request.POST, prefix="signup") if form.is_valid() and not recaptcha_error: new_user = form.save() - login_user(request, new_user, backend='django.contrib.auth.backends.ModelBackend') + login_user(request, new_user, backend="django.contrib.auth.backends.ModelBackend") logging.user(new_user, "~FG~SB~BBNEW SIGNUP: ~FW%s" % new_user.email) new_user.profile.activate_free() - return HttpResponseRedirect(request.POST['next'] or reverse('index')) + return HttpResponseRedirect(request.POST["next"] or reverse("index")) + + return render( + request, + "accounts/signup.html", + {"form": form, "recaptcha_error": recaptcha_error, "next": request.POST.get("next", "")}, + ) - return render(request, 'accounts/signup.html', { - 'form': form, - 'recaptcha_error': recaptcha_error, - 'next': request.POST.get('next', "") - }) @login_required @csrf_protect def redeem_code(request): - code = request.GET.get('code', None) - form = RedeemCodeForm(initial={'gift_code': code}) + code = request.GET.get("code", None) + form = RedeemCodeForm(initial={"gift_code": code}) if request.method == "POST": form = RedeemCodeForm(data=request.POST) if form.is_valid(): - gift_code = request.POST['gift_code'] + gift_code = request.POST["gift_code"] MRedeemedCode.redeem(user=request.user, gift_code=gift_code) - return render(request, 'reader/paypal_return.xhtml') + return render(request, "reader/paypal_return.xhtml") + + return render( + request, + "accounts/redeem_code.html", + {"form": form, "code": request.POST.get("code", ""), "next": request.POST.get("next", "")}, + ) - return render(request, 'accounts/redeem_code.html', { - 'form': form, - 'code': request.POST.get('code', ""), - 'next': request.POST.get('next', "") - }) - @ajax_login_required @require_POST @json.json_view def set_account_settings(request): code = -1 - message = 'OK' + message = "OK" form = AccountSettingsForm(user=request.user, data=request.POST) if form.is_valid(): @@ -178,100 +202,113 @@ def set_account_settings(request): code = 1 else: message = form.errors[list(form.errors.keys())[0]][0] - + payload = { "username": request.user.username, "email": request.user.email, - "social_profile": MSocialProfile.profile(request.user.pk) + "social_profile": MSocialProfile.profile(request.user.pk), } return dict(code=code, message=message, payload=payload) - + + @ajax_login_required @require_POST @json.json_view def set_view_setting(request): code = 1 - feed_id = request.POST['feed_id'] - feed_view_setting = request.POST.get('feed_view_setting') - feed_order_setting = request.POST.get('feed_order_setting') - feed_read_filter_setting = request.POST.get('feed_read_filter_setting') - feed_layout_setting = request.POST.get('feed_layout_setting') - feed_dashboard_count_setting = request.POST.get('feed_dashboard_count_setting') + feed_id = request.POST["feed_id"] + feed_view_setting = request.POST.get("feed_view_setting") + 
feed_order_setting = request.POST.get("feed_order_setting") + feed_read_filter_setting = request.POST.get("feed_read_filter_setting") + feed_layout_setting = request.POST.get("feed_layout_setting") + feed_dashboard_count_setting = request.POST.get("feed_dashboard_count_setting") view_settings = json.decode(request.user.profile.view_settings) - + setting = view_settings.get(feed_id, {}) - if isinstance(setting, str): setting = {'v': setting} - if feed_view_setting: setting['v'] = feed_view_setting - if feed_order_setting: setting['o'] = feed_order_setting - if feed_read_filter_setting: setting['r'] = feed_read_filter_setting - if feed_dashboard_count_setting: setting['d'] = feed_dashboard_count_setting - if feed_layout_setting: setting['l'] = feed_layout_setting - + if isinstance(setting, str): + setting = {"v": setting} + if feed_view_setting: + setting["v"] = feed_view_setting + if feed_order_setting: + setting["o"] = feed_order_setting + if feed_read_filter_setting: + setting["r"] = feed_read_filter_setting + if feed_dashboard_count_setting: + setting["d"] = feed_dashboard_count_setting + if feed_layout_setting: + setting["l"] = feed_layout_setting + view_settings[feed_id] = setting request.user.profile.view_settings = json.encode(view_settings) request.user.profile.save() - - logging.user(request, "~FMView settings: %s/%s/%s/%s" % (feed_view_setting, - feed_order_setting, feed_read_filter_setting, feed_layout_setting)) + + logging.user( + request, + "~FMView settings: %s/%s/%s/%s" + % (feed_view_setting, feed_order_setting, feed_read_filter_setting, feed_layout_setting), + ) response = dict(code=code) return response + @ajax_login_required @require_POST @json.json_view def clear_view_setting(request): code = 1 - view_setting_type = request.POST.get('view_setting_type') + view_setting_type = request.POST.get("view_setting_type") view_settings = json.decode(request.user.profile.view_settings) new_view_settings = {} removed = 0 for feed_id, view_setting in list(view_settings.items()): - if view_setting_type == 'layout' and 'l' in view_setting: - del view_setting['l'] + if view_setting_type == "layout" and "l" in view_setting: + del view_setting["l"] removed += 1 - if view_setting_type == 'view' and 'v' in view_setting: - del view_setting['v'] + if view_setting_type == "view" and "v" in view_setting: + del view_setting["v"] removed += 1 - if view_setting_type == 'order' and 'o' in view_setting: - del view_setting['o'] + if view_setting_type == "order" and "o" in view_setting: + del view_setting["o"] removed += 1 - if view_setting_type == 'order' and 'r' in view_setting: - del view_setting['r'] + if view_setting_type == "order" and "r" in view_setting: + del view_setting["r"] removed += 1 new_view_settings[feed_id] = view_setting request.user.profile.view_settings = json.encode(new_view_settings) request.user.profile.save() - + logging.user(request, "~FMClearing view settings: %s (found %s)" % (view_setting_type, removed)) response = dict(code=code, view_settings=view_settings, removed=removed) return response - + + @ajax_login_required @json.json_view def get_view_setting(request): code = 1 - feed_id = request.POST['feed_id'] + feed_id = request.POST["feed_id"] view_settings = json.decode(request.user.profile.view_settings) - + response = dict(code=code, payload=view_settings.get(feed_id)) return response - + @ajax_login_required @require_POST @json.json_view def set_collapsed_folders(request): code = 1 - collapsed_folders = request.POST['collapsed_folders'] - + collapsed_folders = 
request.POST["collapsed_folders"] + request.user.profile.collapsed_folders = collapsed_folders request.user.profile.save() - + logging.user(request, "~FMCollapsing folder: %s" % collapsed_folders) response = dict(code=code) return response + def paypal_ipn(request): try: return paypal_standard_ipn(request) @@ -279,23 +316,24 @@ def paypal_ipn(request): # Paypal may have sent webhooks to ipn, so redirect logging.user(request, f" ---> Paypal IPN to webhooks redirect: {request.body}") return paypal_webhooks(request) - + + def paypal_webhooks(request): try: data = json.decode(request.body) except python_json.decoder.JSONDecodeError: # Kick it over to paypal ipn return paypal_standard_ipn(request) - + logging.user(request, f" ---> Paypal webhooks {data.get('event_type', '')} data: {data}") - - if data['event_type'] == "BILLING.SUBSCRIPTION.CREATED": + + if data["event_type"] == "BILLING.SUBSCRIPTION.CREATED": # Don't start a subscription but save it in case the payment comes before the subscription activation - user = User.objects.get(pk=int(data['resource']['custom_id'])) - user.profile.store_paypal_sub_id(data['resource']['id'], skip_save_primary=True) - elif data['event_type'] in ["BILLING.SUBSCRIPTION.ACTIVATED", "BILLING.SUBSCRIPTION.UPDATED"]: - user = User.objects.get(pk=int(data['resource']['custom_id'])) - user.profile.store_paypal_sub_id(data['resource']['id']) + user = User.objects.get(pk=int(data["resource"]["custom_id"])) + user.profile.store_paypal_sub_id(data["resource"]["id"], skip_save_primary=True) + elif data["event_type"] in ["BILLING.SUBSCRIPTION.ACTIVATED", "BILLING.SUBSCRIPTION.UPDATED"]: + user = User.objects.get(pk=int(data["resource"]["custom_id"])) + user.profile.store_paypal_sub_id(data["resource"]["id"]) # plan_id = data['resource']['plan_id'] # if plan_id == Profile.plan_to_paypal_plan_id('premium'): # user.profile.activate_premium() @@ -305,43 +343,44 @@ def paypal_webhooks(request): # user.profile.activate_pro() user.profile.cancel_premium_stripe() user.profile.setup_premium_history() - if data['event_type'] == "BILLING.SUBSCRIPTION.ACTIVATED": + if data["event_type"] == "BILLING.SUBSCRIPTION.ACTIVATED": user.profile.cancel_and_prorate_existing_paypal_subscriptions(data) - elif data['event_type'] == "PAYMENT.SALE.COMPLETED": - user = User.objects.get(pk=int(data['resource']['custom'])) + elif data["event_type"] == "PAYMENT.SALE.COMPLETED": + user = User.objects.get(pk=int(data["resource"]["custom"])) user.profile.setup_premium_history() - elif data['event_type'] == "PAYMENT.CAPTURE.REFUNDED": - user = User.objects.get(pk=int(data['resource']['custom_id'])) + elif data["event_type"] == "PAYMENT.CAPTURE.REFUNDED": + user = User.objects.get(pk=int(data["resource"]["custom_id"])) user.profile.setup_premium_history() - elif data['event_type'] in ["BILLING.SUBSCRIPTION.CANCELLED", "BILLING.SUBSCRIPTION.SUSPENDED"]: - custom_id = data['resource'].get('custom_id', None) + elif data["event_type"] in ["BILLING.SUBSCRIPTION.CANCELLED", "BILLING.SUBSCRIPTION.SUSPENDED"]: + custom_id = data["resource"].get("custom_id", None) if custom_id: user = User.objects.get(pk=int(custom_id)) else: - paypal_id = PaypalIds.objects.get(paypal_sub_id=data['resource']['id']) + paypal_id = PaypalIds.objects.get(paypal_sub_id=data["resource"]["id"]) user = paypal_id.user user.profile.setup_premium_history() return HttpResponse("OK") + def paypal_form(request): domain = Site.objects.get_current().domain if settings.DEBUG: domain = "73ee-71-233-245-159.ngrok.io" - + paypal_dict = { "cmd": 
"_xclick-subscriptions", "business": "samuel@ofbrooklyn.com", - "a3": "12.00", # price - "p3": 1, # duration of each unit (depends on unit) - "t3": "Y", # duration unit ("M for Month") - "src": "1", # make payments recur - "sra": "1", # reattempt payment on payment error - "no_note": "1", # remove extra notes (optional) + "a3": "12.00", # price + "p3": 1, # duration of each unit (depends on unit) + "t3": "Y", # duration unit ("M for Month") + "src": "1", # make payments recur + "sra": "1", # reattempt payment on payment error + "no_note": "1", # remove extra notes (optional) "item_name": "NewsBlur Premium Account", - "notify_url": "https://%s%s" % (domain, reverse('paypal-ipn')), - "return_url": "https://%s%s" % (domain, reverse('paypal-return')), - "cancel_return": "https://%s%s" % (domain, reverse('index')), + "notify_url": "https://%s%s" % (domain, reverse("paypal-ipn")), + "return_url": "https://%s%s" % (domain, reverse("paypal-return")), + "cancel_return": "https://%s%s" % (domain, reverse("index")), "custom": request.user.username, } @@ -351,303 +390,360 @@ def paypal_form(request): logging.user(request, "~FBLoading paypal/feedchooser") # Output the button. - return HttpResponse(form.render(), content_type='text/html') + return HttpResponse(form.render(), content_type="text/html") + @login_required def paypal_return(request): + return render( + request, + "reader/paypal_return.xhtml", + { + "user_profile": request.user.profile, + }, + ) - return render(request, 'reader/paypal_return.xhtml', { - 'user_profile': request.user.profile, - }) @login_required def paypal_archive_return(request): + return render( + request, + "reader/paypal_archive_return.xhtml", + { + "user_profile": request.user.profile, + }, + ) - return render(request, 'reader/paypal_archive_return.xhtml', { - 'user_profile': request.user.profile, - }) @login_required def activate_premium(request): - return HttpResponseRedirect(reverse('index')) - + return HttpResponseRedirect(reverse("index")) + + @ajax_login_required @json.json_view def profile_is_premium(request): # Check tries code = 0 - retries = int(request.GET['retries']) - + retries = int(request.GET["retries"]) + subs = UserSubscription.objects.filter(user=request.user) total_subs = subs.count() activated_subs = subs.filter(active=True).count() - + if retries >= 30: code = -1 if not request.user.profile.is_premium: subject = "Premium activation failed: %s (%s/%s)" % (request.user, activated_subs, total_subs) - message = """User: %s (%s) -- Email: %s""" % (request.user.username, request.user.pk, request.user.email) + message = """User: %s (%s) -- Email: %s""" % ( + request.user.username, + request.user.pk, + request.user.email, + ) mail_admins(subject, message) request.user.profile.activate_premium() - + profile = Profile.objects.get(user=request.user) return { - 'is_premium': profile.is_premium, - 'is_premium_archive': profile.is_archive, - 'code': code, - 'activated_subs': activated_subs, - 'total_subs': total_subs, + "is_premium": profile.is_premium, + "is_premium_archive": profile.is_archive, + "code": code, + "activated_subs": activated_subs, + "total_subs": total_subs, } + @ajax_login_required @json.json_view def profile_is_premium_archive(request): # Check tries code = 0 - retries = int(request.GET['retries']) + retries = int(request.GET["retries"]) subs = UserSubscription.objects.filter(user=request.user) total_subs = subs.count() activated_subs = subs.filter(feed__archive_subscribers__gte=1).count() - + if retries >= 30: code = -1 if not 
request.user.profile.is_premium_archive: - subject = "Premium archive activation failed: %s (%s/%s)" % (request.user, activated_subs, total_subs) - message = """User: %s (%s) -- Email: %s""" % (request.user.username, request.user.pk, request.user.email) + subject = "Premium archive activation failed: %s (%s/%s)" % ( + request.user, + activated_subs, + total_subs, + ) + message = """User: %s (%s) -- Email: %s""" % ( + request.user.username, + request.user.pk, + request.user.email, + ) mail_admins(subject, message) request.user.profile.activate_archive() profile = Profile.objects.get(user=request.user) return { - 'is_premium': profile.is_premium, - 'is_premium_archive': profile.is_archive, - 'code': code, - 'activated_subs': activated_subs, - 'total_subs': total_subs, + "is_premium": profile.is_premium, + "is_premium_archive": profile.is_archive, + "code": code, + "activated_subs": activated_subs, + "total_subs": total_subs, } + @ajax_login_required @json.json_view def save_ios_receipt(request): - receipt = request.POST.get('receipt') - product_identifier = request.POST.get('product_identifier') - transaction_identifier = request.POST.get('transaction_identifier') - + receipt = request.POST.get("receipt") + product_identifier = request.POST.get("product_identifier") + transaction_identifier = request.POST.get("transaction_identifier") + logging.user(request, "~BM~FBSaving iOS Receipt: %s %s" % (product_identifier, transaction_identifier)) - + paid = request.user.profile.activate_ios_premium(transaction_identifier) if paid: - logging.user(request, "~BM~FBSending iOS Receipt email: %s %s" % (product_identifier, transaction_identifier)) + logging.user( + request, "~BM~FBSending iOS Receipt email: %s %s" % (product_identifier, transaction_identifier) + ) subject = "iOS Premium: %s (%s)" % (request.user.profile, product_identifier) - message = """User: %s (%s) -- Email: %s, product: %s, txn: %s, receipt: %s""" % (request.user.username, request.user.pk, request.user.email, product_identifier, transaction_identifier, receipt) + message = """User: %s (%s) -- Email: %s, product: %s, txn: %s, receipt: %s""" % ( + request.user.username, + request.user.pk, + request.user.email, + product_identifier, + transaction_identifier, + receipt, + ) mail_admins(subject, message) else: - logging.user(request, "~BM~FBNot sending iOS Receipt email, already paid: %s %s" % (product_identifier, transaction_identifier)) - - + logging.user( + request, + "~BM~FBNot sending iOS Receipt email, already paid: %s %s" + % (product_identifier, transaction_identifier), + ) + return request.user.profile - + + @ajax_login_required @json.json_view def save_android_receipt(request): - order_id = request.POST.get('order_id') - product_id = request.POST.get('product_id') - + order_id = request.POST.get("order_id") + product_id = request.POST.get("product_id") + logging.user(request, "~BM~FBSaving Android Receipt: %s %s" % (product_id, order_id)) - + paid = request.user.profile.activate_android_premium(order_id) if paid: logging.user(request, "~BM~FBSending Android Receipt email: %s %s" % (product_id, order_id)) subject = "Android Premium: %s (%s)" % (request.user.profile, product_id) - message = """User: %s (%s) -- Email: %s, product: %s, order: %s""" % (request.user.username, request.user.pk, request.user.email, product_id, order_id) + message = """User: %s (%s) -- Email: %s, product: %s, order: %s""" % ( + request.user.username, + request.user.pk, + request.user.email, + product_id, + order_id, + ) mail_admins(subject, message) else: 
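+        # A falsy result from activate_android_premium() means this order was
+        # already recorded as paid, so just log it rather than re-emailing admins.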
- logging.user(request, "~BM~FBNot sending Android Receipt email, already paid: %s %s" % (product_id, order_id)) - - + logging.user( + request, "~BM~FBNot sending Android Receipt email, already paid: %s %s" % (product_id, order_id) + ) + return request.user.profile - + + @login_required def stripe_form(request): user = request.user success_updating = False stripe.api_key = settings.STRIPE_SECRET plan = PLANS[0][0] - renew = is_true(request.GET.get('renew', False)) + renew = is_true(request.GET.get("renew", False)) error = None - - if request.method == 'POST': + + if request.method == "POST": zebra_form = StripePlusPaymentForm(request.POST, email=user.email) if zebra_form.is_valid(): - user.email = zebra_form.cleaned_data['email'] + user.email = zebra_form.cleaned_data["email"] user.save() customer = None - current_premium = (user.profile.is_premium and - user.profile.premium_expire and - user.profile.premium_expire > datetime.datetime.now()) - + current_premium = ( + user.profile.is_premium + and user.profile.premium_expire + and user.profile.premium_expire > datetime.datetime.now() + ) + # Are they changing their existing card? if user.profile.stripe_id: customer = stripe.Customer.retrieve(user.profile.stripe_id) try: - card = customer.sources.create(source=zebra_form.cleaned_data['stripe_token']) + card = customer.sources.create(source=zebra_form.cleaned_data["stripe_token"]) except stripe.error.CardError: error = "This card was declined." else: customer.default_card = card.id customer.save() - user.profile.strip_4_digits = zebra_form.cleaned_data['last_4_digits'] + user.profile.strip_4_digits = zebra_form.cleaned_data["last_4_digits"] user.profile.save() - user.profile.activate_premium() # TODO: Remove, because webhooks are slow + user.profile.activate_premium() # TODO: Remove, because webhooks are slow success_updating = True else: try: - customer = stripe.Customer.create(**{ - 'source': zebra_form.cleaned_data['stripe_token'], - 'plan': zebra_form.cleaned_data['plan'], - 'email': user.email, - 'description': user.username, - }) + customer = stripe.Customer.create( + **{ + "source": zebra_form.cleaned_data["stripe_token"], + "plan": zebra_form.cleaned_data["plan"], + "email": user.email, + "description": user.username, + } + ) except stripe.error.CardError: error = "This card was declined." 
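+            # Only reached when Customer.create() succeeded: the else branch
+            # below persists the card digits and customer id, then activates
+            # premium immediately instead of waiting on the (slow) webhook.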
else: - user.profile.strip_4_digits = zebra_form.cleaned_data['last_4_digits'] + user.profile.strip_4_digits = zebra_form.cleaned_data["last_4_digits"] user.profile.stripe_id = customer.id user.profile.save() - user.profile.activate_premium() # TODO: Remove, because webhooks are slow + user.profile.activate_premium() # TODO: Remove, because webhooks are slow success_updating = True - + # Check subscription to ensure latest plan, otherwise cancel it and subscribe if success_updating and customer and customer.subscriptions.total_count == 1: subscription = customer.subscriptions.data[0] - if subscription['plan']['id'] != "newsblur-premium-36": + if subscription["plan"]["id"] != "newsblur-premium-36": for sub in customer.subscriptions: sub.delete() customer = stripe.Customer.retrieve(user.profile.stripe_id) - + if success_updating and customer and customer.subscriptions.total_count == 0: params = dict( - customer=customer.id, - items=[ - { - "plan": "newsblur-premium-36", - }, - ]) + customer=customer.id, + items=[ + { + "plan": "newsblur-premium-36", + }, + ], + ) premium_expire = user.profile.premium_expire if current_premium and premium_expire: if premium_expire < (datetime.datetime.now() + datetime.timedelta(days=365)): - params['billing_cycle_anchor'] = premium_expire.strftime('%s') - params['trial_end'] = premium_expire.strftime('%s') + params["billing_cycle_anchor"] = premium_expire.strftime("%s") + params["trial_end"] = premium_expire.strftime("%s") stripe.Subscription.create(**params) else: zebra_form = StripePlusPaymentForm(email=user.email, plan=plan) - + if success_updating: - return render(request, 'reader/paypal_return.xhtml') - + return render(request, "reader/paypal_return.xhtml") + new_user_queue_count = RNewUserQueue.user_count() new_user_queue_position = RNewUserQueue.user_position(request.user.pk) new_user_queue_behind = 0 if new_user_queue_position >= 0: - new_user_queue_behind = new_user_queue_count - new_user_queue_position + new_user_queue_behind = new_user_queue_count - new_user_queue_position new_user_queue_position -= 1 - + immediate_charge = True if user.profile.premium_expire and user.profile.premium_expire > datetime.datetime.now(): immediate_charge = False - + logging.user(request, "~BM~FBLoading Stripe form") - return render(request, 'profile/stripe_form.xhtml', + return render( + request, + "profile/stripe_form.xhtml", { - 'zebra_form': zebra_form, - 'publishable': settings.STRIPE_PUBLISHABLE, - 'success_updating': success_updating, - 'new_user_queue_count': new_user_queue_count - 1, - 'new_user_queue_position': new_user_queue_position, - 'new_user_queue_behind': new_user_queue_behind, - 'renew': renew, - 'immediate_charge': immediate_charge, - 'error': error, - } + "zebra_form": zebra_form, + "publishable": settings.STRIPE_PUBLISHABLE, + "success_updating": success_updating, + "new_user_queue_count": new_user_queue_count - 1, + "new_user_queue_position": new_user_queue_position, + "new_user_queue_behind": new_user_queue_behind, + "renew": renew, + "immediate_charge": immediate_charge, + "error": error, + }, ) + @login_required def switch_stripe_subscription(request): - plan = request.POST['plan'] + plan = request.POST["plan"] if plan == "change_stripe": return stripe_checkout(request) elif plan == "change_paypal": paypal_url = request.user.profile.paypal_change_billing_details_url() return HttpResponseRedirect(paypal_url) - + switch_successful = request.user.profile.switch_stripe_subscription(plan) - - logging.user(request, "~FCSwitching subscription to 
~SB%s~SN~FC (%s)" %( - plan, - '~FGsucceeded~FC' if switch_successful else '~FRfailed~FC' - )) - + + logging.user( + request, + "~FCSwitching subscription to ~SB%s~SN~FC (%s)" + % (plan, "~FGsucceeded~FC" if switch_successful else "~FRfailed~FC"), + ) + if switch_successful: - return HttpResponseRedirect(reverse('stripe-return')) - + return HttpResponseRedirect(reverse("stripe-return")) + return stripe_checkout(request) + def switch_paypal_subscription(request): - plan = request.POST['plan'] + plan = request.POST["plan"] if plan == "change_stripe": return stripe_checkout(request) elif plan == "change_paypal": paypal_url = request.user.profile.paypal_change_billing_details_url() return HttpResponseRedirect(paypal_url) - + approve_url = request.user.profile.switch_paypal_subscription_approval_url(plan) - - logging.user(request, "~FCSwitching subscription to ~SB%s~SN~FC (%s)" %( - plan, - '~FGsucceeded~FC' if approve_url else '~FRfailed~FC' - )) - + + logging.user( + request, + "~FCSwitching subscription to ~SB%s~SN~FC (%s)" + % (plan, "~FGsucceeded~FC" if approve_url else "~FRfailed~FC"), + ) + if approve_url: return HttpResponseRedirect(approve_url) - paypal_return = reverse('paypal-return') + paypal_return = reverse("paypal-return") if plan == "archive": - paypal_return = reverse('paypal-archive-return') + paypal_return = reverse("paypal-archive-return") return HttpResponseRedirect(paypal_return) + @login_required def stripe_checkout(request): stripe.api_key = settings.STRIPE_SECRET domain = Site.objects.get_current().domain - plan = request.POST['plan'] - + plan = request.POST["plan"] + if plan == "change_stripe": checkout_session = stripe.billing_portal.Session.create( customer=request.user.profile.stripe_id, - return_url="http://%s%s?next=payments" % (domain, reverse('index')), + return_url="http://%s%s?next=payments" % (domain, reverse("index")), ) return HttpResponseRedirect(checkout_session.url, status=303) - + price = Profile.plan_to_stripe_price(plan) - + session_dict = { "line_items": [ { - 'price': price, - 'quantity': 1, + "price": price, + "quantity": 1, }, ], - "mode": 'subscription', + "mode": "subscription", "metadata": {"newsblur_user_id": request.user.pk}, - "success_url": "http://%s%s" % (domain, reverse('stripe-return')), - "cancel_url": "http://%s%s" % (domain, reverse('index')), + "success_url": "http://%s%s" % (domain, reverse("stripe-return")), + "cancel_url": "http://%s%s" % (domain, reverse("index")), } if request.user.profile.stripe_id: - session_dict['customer'] = request.user.profile.stripe_id + session_dict["customer"] = request.user.profile.stripe_id else: session_dict["customer_email"] = request.user.email @@ -657,25 +753,27 @@ def stripe_checkout(request): return HttpResponseRedirect(checkout_session.url, status=303) -@render_to('reader/activities_module.xhtml') + +@render_to("reader/activities_module.xhtml") def load_activities(request): user = get_user(request) - page = max(1, int(request.GET.get('page', 1))) + page = max(1, int(request.GET.get("page", 1))) activities, has_next_page = MActivity.user(user.pk, page=page) return { - 'activities': activities, - 'page': page, - 'has_next_page': has_next_page, - 'username': 'You', + "activities": activities, + "page": page, + "has_next_page": has_next_page, + "username": "You", } + @ajax_login_required @json.json_view def payment_history(request): user = request.user if request.user.is_staff: - user_id = request.GET.get('user_id', request.user.pk) + user_id = request.GET.get("user_id", request.user.pk) user = 
User.objects.get(pk=user_id) history = PaymentHistory.objects.filter(user=user) @@ -690,19 +788,19 @@ def payment_history(request): "feeds": UserSubscription.objects.filter(user=user).count(), "email": user.email, "read_story_count": RUserStory.read_story_count(user.pk), - "feed_opens": UserSubscription.objects.filter(user=user).aggregate(sum=Sum('feed_opens'))['sum'], + "feed_opens": UserSubscription.objects.filter(user=user).aggregate(sum=Sum("feed_opens"))["sum"], "training": { - 'title_ps': MClassifierTitle.objects.filter(user_id=user.pk, score__gt=0).count(), - 'title_ng': MClassifierTitle.objects.filter(user_id=user.pk, score__lt=0).count(), - 'tag_ps': MClassifierTag.objects.filter(user_id=user.pk, score__gt=0).count(), - 'tag_ng': MClassifierTag.objects.filter(user_id=user.pk, score__lt=0).count(), - 'author_ps': MClassifierAuthor.objects.filter(user_id=user.pk, score__gt=0).count(), - 'author_ng': MClassifierAuthor.objects.filter(user_id=user.pk, score__lt=0).count(), - 'feed_ps': MClassifierFeed.objects.filter(user_id=user.pk, score__gt=0).count(), - 'feed_ng': MClassifierFeed.objects.filter(user_id=user.pk, score__lt=0).count(), - } + "title_ps": MClassifierTitle.objects.filter(user_id=user.pk, score__gt=0).count(), + "title_ng": MClassifierTitle.objects.filter(user_id=user.pk, score__lt=0).count(), + "tag_ps": MClassifierTag.objects.filter(user_id=user.pk, score__gt=0).count(), + "tag_ng": MClassifierTag.objects.filter(user_id=user.pk, score__lt=0).count(), + "author_ps": MClassifierAuthor.objects.filter(user_id=user.pk, score__gt=0).count(), + "author_ng": MClassifierAuthor.objects.filter(user_id=user.pk, score__lt=0).count(), + "feed_ps": MClassifierFeed.objects.filter(user_id=user.pk, score__gt=0).count(), + "feed_ng": MClassifierFeed.objects.filter(user_id=user.pk, score__lt=0).count(), + }, } - + next_invoice = None stripe_customer = user.profile.stripe_customer() paypal_api = user.profile.paypal_api() @@ -710,48 +808,54 @@ def payment_history(request): try: invoice = stripe.Invoice.upcoming(customer=stripe_customer.id) for lines in invoice.lines.data: - next_invoice = dict(payment_date=datetime.datetime.fromtimestamp(lines.period.start), - payment_amount=invoice.amount_due/100.0, - payment_provider="(scheduled)", - scheduled=True) + next_invoice = dict( + payment_date=datetime.datetime.fromtimestamp(lines.period.start), + payment_amount=invoice.amount_due / 100.0, + payment_provider="(scheduled)", + scheduled=True, + ) break except stripe.error.InvalidRequestError: pass - + if paypal_api and not next_invoice and user.profile.premium_renewal and len(history): - next_invoice = dict(payment_date=history[0].payment_date+dateutil.relativedelta.relativedelta(years=1), - payment_amount=history[0].payment_amount, - payment_provider="(scheduled)", - scheduled=True) - + next_invoice = dict( + payment_date=history[0].payment_date + dateutil.relativedelta.relativedelta(years=1), + payment_amount=history[0].payment_amount, + payment_provider="(scheduled)", + scheduled=True, + ) + return { - 'is_premium': user.profile.is_premium, - 'is_archive': user.profile.is_archive, - 'is_pro': user.profile.is_pro, - 'premium_expire': user.profile.premium_expire, - 'premium_renewal': user.profile.premium_renewal, - 'active_provider': user.profile.active_provider, - 'payments': history, - 'statistics': statistics, - 'next_invoice': next_invoice, + "is_premium": user.profile.is_premium, + "is_archive": user.profile.is_archive, + "is_pro": user.profile.is_pro, + "premium_expire": 
user.profile.premium_expire, + "premium_renewal": user.profile.premium_renewal, + "active_provider": user.profile.active_provider, + "payments": history, + "statistics": statistics, + "next_invoice": next_invoice, } + @ajax_login_required @json.json_view def cancel_premium(request): canceled = request.user.profile.cancel_premium() - + return { - 'code': 1 if canceled else -1, + "code": 1 if canceled else -1, } + @staff_member_required @ajax_login_required @json.json_view def refund_premium(request): - user_id = request.POST.get('user_id') - partial = request.POST.get('partial', False) - provider = request.POST.get('provider', None) + user_id = request.POST.get("user_id") + partial = request.POST.get("partial", False) + provider = request.POST.get("provider", None) user = User.objects.get(pk=user_id) try: refunded = user.profile.refund_premium(partial=partial, provider=provider) @@ -760,179 +864,185 @@ def refund_premium(request): except PayPalAPIResponseError as e: refunded = e - return {'code': 1 if type(refunded) == int else -1, 'refunded': refunded} + return {"code": 1 if type(refunded) == int else -1, "refunded": refunded} + @staff_member_required @ajax_login_required @json.json_view def upgrade_premium(request): - user_id = request.POST.get('user_id') + user_id = request.POST.get("user_id") user = User.objects.get(pk=user_id) - - gift = MGiftCode.add(gifting_user_id=User.objects.get(username='samuel').pk, - receiving_user_id=user.pk) + + gift = MGiftCode.add(gifting_user_id=User.objects.get(username="samuel").pk, receiving_user_id=user.pk) MRedeemedCode.redeem(user, gift.gift_code) - - return {'code': user.profile.is_premium} + + return {"code": user.profile.is_premium} + @staff_member_required @ajax_login_required @json.json_view def never_expire_premium(request): - user_id = request.POST.get('user_id') - years = int(request.POST.get('years', 0)) + user_id = request.POST.get("user_id") + years = int(request.POST.get("years", 0)) user = User.objects.get(pk=user_id) if user.profile.is_premium: if years: - user.profile.premium_expire = datetime.datetime.now() + datetime.timedelta(days=365*years) + user.profile.premium_expire = datetime.datetime.now() + datetime.timedelta(days=365 * years) else: user.profile.premium_expire = None user.profile.save() - return {'code': 1} - - return {'code': -1} + return {"code": 1} + + return {"code": -1} + @staff_member_required @ajax_login_required @json.json_view def update_payment_history(request): - user_id = request.POST.get('user_id') + user_id = request.POST.get("user_id") user = User.objects.get(pk=user_id) user.profile.setup_premium_history(set_premium_expire=False) - - return {'code': 1} - + + return {"code": 1} + + @login_required -@render_to('profile/delete_account.xhtml') +@render_to("profile/delete_account.xhtml") def delete_account(request): - if request.method == 'POST': + if request.method == "POST": form = DeleteAccountForm(request.POST, user=request.user) if form.is_valid(): - logging.user(request.user, "~SK~BC~FRDeleting ~SB%s~SN's account." % - request.user.username) + logging.user(request.user, "~SK~BC~FRDeleting ~SB%s~SN's account." % request.user.username) request.user.profile.delete_user(confirm=True) logout_user(request) - return HttpResponseRedirect(reverse('index')) + return HttpResponseRedirect(reverse("index")) else: - logging.user(request.user, "~BC~FRFailed attempt to delete ~SB%s~SN's account." % - request.user.username) + logging.user( + request.user, "~BC~FRFailed attempt to delete ~SB%s~SN's account." 
% request.user.username + ) else: - logging.user(request.user, "~BC~FRAttempting to delete ~SB%s~SN's account." % - request.user.username) + logging.user(request.user, "~BC~FRAttempting to delete ~SB%s~SN's account." % request.user.username) form = DeleteAccountForm(user=request.user) return { - 'delete_form': form, + "delete_form": form, } - -@render_to('profile/forgot_password.xhtml') + +@render_to("profile/forgot_password.xhtml") def forgot_password(request): - if request.method == 'POST': + if request.method == "POST": form = ForgotPasswordForm(request.POST) if form.is_valid(): - logging.user(request.user, "~BC~FRForgot password: ~SB%s" % request.POST['email']) + logging.user(request.user, "~BC~FRForgot password: ~SB%s" % request.POST["email"]) try: - user = User.objects.get(email__iexact=request.POST['email']) + user = User.objects.get(email__iexact=request.POST["email"]) except User.MultipleObjectsReturned: - user = User.objects.filter(email__iexact=request.POST['email'])[0] + user = User.objects.filter(email__iexact=request.POST["email"])[0] user.profile.send_forgot_password_email() - return HttpResponseRedirect(reverse('index')) + return HttpResponseRedirect(reverse("index")) else: - logging.user(request.user, "~BC~FRFailed forgot password: ~SB%s~SN" % - request.POST['email']) + logging.user(request.user, "~BC~FRFailed forgot password: ~SB%s~SN" % request.POST["email"]) else: logging.user(request.user, "~BC~FRAttempting to retrieve forgotton password.") form = ForgotPasswordForm() return { - 'forgot_password_form': form, + "forgot_password_form": form, } - + + @login_required -@render_to('profile/forgot_password_return.xhtml') +@render_to("profile/forgot_password_return.xhtml") def forgot_password_return(request): - if request.method == 'POST': - logging.user(request.user, "~BC~FRReseting ~SB%s~SN's password." % - request.user.username) - new_password = request.POST.get('password', '') + if request.method == "POST": + logging.user(request.user, "~BC~FRReseting ~SB%s~SN's password." % request.user.username) + new_password = request.POST.get("password", "") request.user.set_password(new_password) request.user.save() - return HttpResponseRedirect(reverse('index')) + return HttpResponseRedirect(reverse("index")) else: - logging.user(request.user, "~BC~FRAttempting to reset ~SB%s~SN's password." % - request.user.username) + logging.user(request.user, "~BC~FRAttempting to reset ~SB%s~SN's password." 
% request.user.username) form = ForgotPasswordReturnForm() return { - 'forgot_password_return_form': form, + "forgot_password_return_form": form, } + @ajax_login_required @json.json_view def delete_starred_stories(request): - timestamp = request.POST.get('timestamp', None) + timestamp = request.POST.get("timestamp", None) if timestamp: delete_date = datetime.datetime.fromtimestamp(int(timestamp)) else: delete_date = datetime.datetime.now() - starred_stories = MStarredStory.objects.filter(user_id=request.user.pk, - starred_date__lte=delete_date) + starred_stories = MStarredStory.objects.filter(user_id=request.user.pk, starred_date__lte=delete_date) stories_deleted = starred_stories.count() starred_stories.delete() MStarredStoryCounts.count_for_user(request.user.pk, total_only=True) starred_counts, starred_count = MStarredStoryCounts.user_counts(request.user.pk, include_total=True) - - logging.user(request.user, "~BC~FRDeleting %s/%s starred stories (%s)" % (stories_deleted, - stories_deleted+starred_count, delete_date)) - return dict(code=1, stories_deleted=stories_deleted, starred_counts=starred_counts, - starred_count=starred_count) + logging.user( + request.user, + "~BC~FRDeleting %s/%s starred stories (%s)" + % (stories_deleted, stories_deleted + starred_count, delete_date), + ) + + return dict( + code=1, stories_deleted=stories_deleted, starred_counts=starred_counts, starred_count=starred_count + ) @ajax_login_required @json.json_view def delete_all_sites(request): - request.user.profile.send_opml_export_email(reason="You have deleted all of your sites, so here's a backup of all of your subscriptions just in case.") - + request.user.profile.send_opml_export_email( + reason="You have deleted all of your sites, so here's a backup of all of your subscriptions just in case." 
+ ) + subs = UserSubscription.objects.filter(user=request.user) sub_count = subs.count() subs.delete() - + usf = UserSubscriptionFolders.objects.get(user=request.user) - usf.folders = '[]' + usf.folders = "[]" usf.save() - + logging.user(request.user, "~BC~FRDeleting %s sites" % sub_count) return dict(code=1) @login_required -@render_to('profile/email_optout.xhtml') +@render_to("profile/email_optout.xhtml") def email_optout(request): user = request.user user.profile.send_emails = False user.profile.save() - + return { "user": user, } + @json.json_view def ios_subscription_status(request): logging.debug(" ---> iOS Subscription Status: %s" % request.body) data = json.decode(request.body) - subject = "iOS Subscription Status: %s" % data.get('notification_type', "[missing]") + subject = "iOS Subscription Status: %s" % data.get("notification_type", "[missing]") message = """%s""" % (request.body) mail_admins(subject, message) - - return { - "code": 1 - } + + return {"code": 1} + def trigger_error(request): logging.user(request.user, "~BR~FW~SBTriggering divison by zero") division_by_zero = 1 / 0 - return HttpResponseRedirect(reverse('index')) + return HttpResponseRedirect(reverse("index")) diff --git a/apps/push/migrations/0001_initial.py b/apps/push/migrations/0001_initial.py index 4792b60bcd..9baa20c60d 100644 --- a/apps/push/migrations/0001_initial.py +++ b/apps/push/migrations/0001_initial.py @@ -6,24 +6,31 @@ class Migration(migrations.Migration): - initial = True dependencies = [ - ('rss_feeds', '0001_initial'), + ("rss_feeds", "0001_initial"), ] operations = [ migrations.CreateModel( - name='PushSubscription', + name="PushSubscription", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('hub', models.URLField(db_index=True)), - ('topic', models.URLField(db_index=True)), - ('verified', models.BooleanField(default=False)), - ('verify_token', models.CharField(max_length=60)), - ('lease_expires', models.DateTimeField(default=datetime.datetime.now)), - ('feed', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='push', to='rss_feeds.Feed')), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("hub", models.URLField(db_index=True)), + ("topic", models.URLField(db_index=True)), + ("verified", models.BooleanField(default=False)), + ("verify_token", models.CharField(max_length=60)), + ("lease_expires", models.DateTimeField(default=datetime.datetime.now)), + ( + "feed", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, related_name="push", to="rss_feeds.Feed" + ), + ), ], ), ] diff --git a/apps/push/models.py b/apps/push/models.py index d7b4c1c314..a128db5d16 100644 --- a/apps/push/models.py +++ b/apps/push/models.py @@ -15,22 +15,20 @@ from utils import log as logging from utils.feed_functions import timelimit, TimeoutError -DEFAULT_LEASE_SECONDS = (10 * 24 * 60 * 60) # 10 days +DEFAULT_LEASE_SECONDS = 10 * 24 * 60 * 60 # 10 days + class PushSubscriptionManager(models.Manager): - @timelimit(5) - def subscribe(self, topic, feed, hub=None, callback=None, - lease_seconds=None, force_retry=False): + def subscribe(self, topic, feed, hub=None, callback=None, lease_seconds=None, force_retry=False): if hub is None: hub = self._get_hub(topic) if hub is None: - raise TypeError('hub cannot be None if the feed does not provide it') + raise TypeError("hub cannot be None if the feed does not provide it") if lease_seconds is None: - 
lease_seconds = getattr(settings, 'PUBSUBHUBBUB_LEASE_SECONDS', - DEFAULT_LEASE_SECONDS) + lease_seconds = getattr(settings, "PUBSUBHUBBUB_LEASE_SECONDS", DEFAULT_LEASE_SECONDS) feed = Feed.get_by_id(feed.id) subscription, created = self.get_or_create(feed=feed) signals.pre_subscribe.send(sender=subscription, created=created) @@ -41,38 +39,44 @@ def subscribe(self, topic, feed, hub=None, callback=None, subscription.topic = feed.feed_link[:200] subscription.hub = hub subscription.save() - + if callback is None: - callback_path = reverse('push-callback', args=(subscription.pk,)) - callback = 'https://' + settings.PUSH_DOMAIN + callback_path + callback_path = reverse("push-callback", args=(subscription.pk,)) + callback = "https://" + settings.PUSH_DOMAIN + callback_path # callback = "https://push.newsblur.com/push/%s" % subscription.pk # + callback_path try: - response = self._send_request(hub, { - 'hub.mode' : 'subscribe', - 'hub.callback' : callback, - 'hub.topic' : topic, - 'hub.verify' : ['async', 'sync'], - 'hub.verify_token' : subscription.generate_token('subscribe'), - 'hub.lease_seconds' : lease_seconds, - }) + response = self._send_request( + hub, + { + "hub.mode": "subscribe", + "hub.callback": callback, + "hub.topic": topic, + "hub.verify": ["async", "sync"], + "hub.verify_token": subscription.generate_token("subscribe"), + "hub.lease_seconds": lease_seconds, + }, + ) except (requests.ConnectionError, requests.exceptions.MissingSchema): response = None if response and response.status_code == 204: subscription.verified = True - elif response and response.status_code == 202: # async verification + elif response and response.status_code == 202: # async verification subscription.verified = False else: error = response and response.text or "" - if not force_retry and 'You may only subscribe to' in error: + if not force_retry and "You may only subscribe to" in error: extracted_topic = re.search("You may only subscribe to (.*?) 
", error) if extracted_topic: - subscription = self.subscribe(extracted_topic.group(1), - feed=feed, hub=hub, force_retry=True) + subscription = self.subscribe( + extracted_topic.group(1), feed=feed, hub=hub, force_retry=True + ) else: - logging.debug(u' ---> [%-30s] ~FR~BKFeed failed to subscribe to push: %s (code: %s)' % ( - subscription.feed.log_title[:30], error[:100], response and response.status_code)) + logging.debug( + " ---> [%-30s] ~FR~BKFeed failed to subscribe to push: %s (code: %s)" + % (subscription.feed.log_title[:30], error[:100], response and response.status_code) + ) subscription.save() feed.setup_push() @@ -80,18 +84,18 @@ def subscribe(self, topic, feed, hub=None, callback=None, signals.verified.send(sender=subscription) return subscription - def _get_hub(self, topic): parsed = feedparser.parse(topic) for link in parsed.feed.links: - if link['rel'] == 'hub': - return link['href'] + if link["rel"] == "hub": + return link["href"] def _send_request(self, url, data): return requests.post(url, data=data) + class PushSubscription(models.Model): - feed = models.OneToOneField(Feed, db_index=True, related_name='push', on_delete=models.CASCADE) + feed = models.OneToOneField(Feed, db_index=True, related_name="push", on_delete=models.CASCADE) hub = models.URLField(db_index=True) topic = models.URLField(db_index=True) verified = models.BooleanField(default=False) @@ -104,43 +108,45 @@ class PushSubscription(models.Model): # unique_together = [ # ('hub', 'topic') # ] - + def unsubscribe(self): feed = self.feed self.delete() feed.setup_push() - + def set_expiration(self, lease_seconds): - self.lease_expires = datetime.now() + timedelta( - seconds=lease_seconds) + self.lease_expires = datetime.now() + timedelta(seconds=lease_seconds) self.save() def generate_token(self, mode): - assert self.pk is not None, \ - 'Subscription must be saved before generating token' - token = mode[:20] + hashlib.sha1(('%s%i%s' % ( - settings.SECRET_KEY, self.pk, mode)).encode(encoding='utf-8')).hexdigest() + assert self.pk is not None, "Subscription must be saved before generating token" + token = ( + mode[:20] + + hashlib.sha1( + ("%s%i%s" % (settings.SECRET_KEY, self.pk, mode)).encode(encoding="utf-8") + ).hexdigest() + ) self.verify_token = token self.save() return token - + def check_urls_against_pushed_data(self, parsed): - if hasattr(parsed.feed, 'links'): # single notification + if hasattr(parsed.feed, "links"): # single notification hub_url = self.hub self_url = self.topic for link in parsed.feed.links: - href = link.get('href', '') - if any(w in href for w in ['wp-admin', 'wp-cron']): + href = link.get("href", "") + if any(w in href for w in ["wp-admin", "wp-cron"]): continue - - if link['rel'] == 'hub': - hub_url = link['href'] - elif link['rel'] == 'self': - self_url = link['href'] - - if hub_url and hub_url.startswith('//'): + + if link["rel"] == "hub": + hub_url = link["href"] + elif link["rel"] == "self": + self_url = link["href"] + + if hub_url and hub_url.startswith("//"): hub_url = "http:%s" % hub_url - + needs_update = False if hub_url and self.hub != hub_url: # hub URL has changed; let's update our subscription @@ -150,23 +156,24 @@ def check_urls_against_pushed_data(self, parsed): needs_update = True if needs_update: - logging.debug(u' ---> [%-30s] ~FR~BKUpdating PuSH hub/topic: %s / %s' % ( - self.feed, hub_url, self_url)) + logging.debug( + " ---> [%-30s] ~FR~BKUpdating PuSH hub/topic: %s / %s" % (self.feed, hub_url, self_url) + ) expiration_time = self.lease_expires - datetime.now() 
- seconds = expiration_time.days*86400 + expiration_time.seconds + seconds = expiration_time.days * 86400 + expiration_time.seconds try: PushSubscription.objects.subscribe( - self_url, feed=self.feed, hub=hub_url, - lease_seconds=seconds) + self_url, feed=self.feed, hub=hub_url, lease_seconds=seconds + ) except TimeoutError: - logging.debug(u' ---> [%-30s] ~FR~BKTimed out updating PuSH hub/topic: %s / %s' % ( - self.feed, hub_url, self_url)) - - + logging.debug( + " ---> [%-30s] ~FR~BKTimed out updating PuSH hub/topic: %s / %s" + % (self.feed, hub_url, self_url) + ) + def __str__(self): if self.verified: - verified = u'verified' + verified = "verified" else: - verified = u'unverified' - return u'to %s on %s: %s' % ( - self.topic, self.hub, verified) + verified = "unverified" + return "to %s on %s: %s" % (self.topic, self.hub, verified) diff --git a/apps/push/signals.py b/apps/push/signals.py index 2f2aa7d3d0..8c915dc808 100644 --- a/apps/push/signals.py +++ b/apps/push/signals.py @@ -2,6 +2,6 @@ from django.dispatch import Signal -pre_subscribe = Signal(providing_args=['created']) +pre_subscribe = Signal(providing_args=["created"]) verified = Signal() -updated = Signal(providing_args=['update']) +updated = Signal(providing_args=["update"]) diff --git a/apps/push/test_push.py b/apps/push/test_push.py index 8aac0d8828..cfbef8bfea 100644 --- a/apps/push/test_push.py +++ b/apps/push/test_push.py @@ -1,17 +1,17 @@ # Copyright 2009 - Participatory Culture Foundation -# +# # This file is part of djpubsubhubbub. -# +# # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: -# +# # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. -# +# # THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES # OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. @@ -32,6 +32,7 @@ from apps.push.models import PushSubscription, PushSubscriptionManager from apps.push.signals import pre_subscribe, verified, updated + class MockResponse(object): def __init__(self, status, data=None): self.status = status @@ -42,13 +43,13 @@ def info(self): def read(self): if self.data is None: - return '' + return "" data, self.data = self.data, None return data -class PSHBTestBase: - urls = 'apps.push.urls' +class PSHBTestBase: + urls = "apps.push.urls" def setUp(self): self._old_send_request = PushSubscriptionManager._send_request @@ -57,8 +58,10 @@ def setUp(self): self.requests = [] self.signals = [] for connecter in pre_subscribe, verified, updated: + def callback(signal=None, **kwargs): self.signals.append((signal, kwargs)) + connecter.connect(callback, dispatch_uid=connecter, weak=False) def tearDown(self): @@ -71,34 +74,32 @@ def _send_request(self, url, data): self.requests.append((url, data)) return self.responses.pop() -class Test_PSHBSubscriptionManagerTest(PSHBTestBase, TestCase): +class Test_PSHBSubscriptionManagerTest(PSHBTestBase, TestCase): def test_sync_verify(self): """ If the hub returns a 204 response, the subscription is verified and active. 
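+        A 204 means the hub verified synchronously, so the manager marks the
+        subscription verified without waiting for a callback.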
""" self.responses.append(MockResponse(204)) - sub = PushSubscription.objects.subscribe('topic', 'hub', 'callback', 2000) + sub = PushSubscription.objects.subscribe("topic", "hub", "callback", 2000) self.assertEquals(len(self.signals), 2) - self.assertEquals(self.signals[0], (pre_subscribe, {'sender': sub, - 'created': True})) - self.assertEquals(self.signals[1], (verified, {'sender': sub})) - self.assertEquals(sub.hub, 'hub') - self.assertEquals(sub.topic, 'topic') + self.assertEquals(self.signals[0], (pre_subscribe, {"sender": sub, "created": True})) + self.assertEquals(self.signals[1], (verified, {"sender": sub})) + self.assertEquals(sub.hub, "hub") + self.assertEquals(sub.topic, "topic") self.assertEquals(sub.verified, True) rough_expires = datetime.now() + timedelta(seconds=2000) - self.assert_(abs(sub.lease_expires - rough_expires).seconds < 5, - 'lease more than 5 seconds off') + self.assert_(abs(sub.lease_expires - rough_expires).seconds < 5, "lease more than 5 seconds off") self.assertEquals(len(self.requests), 1) request = self.requests[0] - self.assertEquals(request[0], 'hub') - self.assertEquals(request[1]['mode'], 'subscribe') - self.assertEquals(request[1]['topic'], 'topic') - self.assertEquals(request[1]['callback'], 'callback') - self.assertEquals(request[1]['verify'], ('async', 'sync')) - self.assertEquals(request[1]['verify_token'], sub.verify_token) - self.assertEquals(request[1]['lease_seconds'], 2000) + self.assertEquals(request[0], "hub") + self.assertEquals(request[1]["mode"], "subscribe") + self.assertEquals(request[1]["topic"], "topic") + self.assertEquals(request[1]["callback"], "callback") + self.assertEquals(request[1]["verify"], ("async", "sync")) + self.assertEquals(request[1]["verify_token"], sub.verify_token) + self.assertEquals(request[1]["lease_seconds"], 2000) def test_async_verify(self): """ @@ -106,25 +107,23 @@ def test_async_verify(self): subscription is verified. 
""" self.responses.append(MockResponse(202)) - sub = PushSubscription.objects.subscribe('topic', 'hub', 'callback', 2000) + sub = PushSubscription.objects.subscribe("topic", "hub", "callback", 2000) self.assertEquals(len(self.signals), 1) - self.assertEquals(self.signals[0], (pre_subscribe, {'sender': sub, - 'created': True})) - self.assertEquals(sub.hub, 'hub') - self.assertEquals(sub.topic, 'topic') + self.assertEquals(self.signals[0], (pre_subscribe, {"sender": sub, "created": True})) + self.assertEquals(sub.hub, "hub") + self.assertEquals(sub.topic, "topic") self.assertEquals(sub.verified, False) rough_expires = datetime.now() + timedelta(seconds=2000) - self.assert_(abs(sub.lease_expires - rough_expires).seconds < 5, - 'lease more than 5 seconds off') + self.assert_(abs(sub.lease_expires - rough_expires).seconds < 5, "lease more than 5 seconds off") self.assertEquals(len(self.requests), 1) request = self.requests[0] - self.assertEquals(request[0], 'hub') - self.assertEquals(request[1]['mode'], 'subscribe') - self.assertEquals(request[1]['topic'], 'topic') - self.assertEquals(request[1]['callback'], 'callback') - self.assertEquals(request[1]['verify'], ('async', 'sync')) - self.assertEquals(request[1]['verify_token'], sub.verify_token) - self.assertEquals(request[1]['lease_seconds'], 2000) + self.assertEquals(request[0], "hub") + self.assertEquals(request[1]["mode"], "subscribe") + self.assertEquals(request[1]["topic"], "topic") + self.assertEquals(request[1]["callback"], "callback") + self.assertEquals(request[1]["verify"], ("async", "sync")) + self.assertEquals(request[1]["verify_token"], sub.verify_token) + self.assertEquals(request[1]["lease_seconds"], 2000) def test_least_seconds_default(self): """ @@ -132,53 +131,51 @@ def test_least_seconds_default(self): should default to 2592000 (30 days). """ self.responses.append(MockResponse(202)) - sub = PushSubscription.objects.subscribe('topic', 'hub', 'callback') + sub = PushSubscription.objects.subscribe("topic", "hub", "callback") rough_expires = datetime.now() + timedelta(seconds=2592000) - self.assert_(abs(sub.lease_expires - rough_expires).seconds < 5, - 'lease more than 5 seconds off') + self.assert_(abs(sub.lease_expires - rough_expires).seconds < 5, "lease more than 5 seconds off") self.assertEquals(len(self.requests), 1) request = self.requests[0] - self.assertEquals(request[1]['lease_seconds'], 2592000) + self.assertEquals(request[1]["lease_seconds"], 2592000) def test_error_on_subscribe_raises_URLError(self): """ If a non-202/204 status is returned, raise a URLError. """ - self.responses.append(MockResponse(500, 'error data')) + self.responses.append(MockResponse(500, "error data")) try: - PushSubscription.objects.subscribe('topic', 'hub', 'callback') + PushSubscription.objects.subscribe("topic", "hub", "callback") except urllib.error.URLError as e: - self.assertEquals(e.reason, - 'error subscribing to topic on hub:\nerror data') + self.assertEquals(e.reason, "error subscribing to topic on hub:\nerror data") else: - self.fail('subscription did not raise URLError exception') + self.fail("subscription did not raise URLError exception") -class Test_PSHBCallbackViewCase(PSHBTestBase, TestCase): +class Test_PSHBCallbackViewCase(PSHBTestBase, TestCase): def test_verify(self): """ Getting the callback from the server should verify the subscription. 
""" - sub = PushSubscription.objects.create( - topic='topic', - hub='hub', - verified=False) - verify_token = sub.generate_token('subscribe') - - response = self.client.get(reverse('pubsubhubbub_callback', - args=(sub.pk,)), - {'hub.mode': 'subscribe', - 'hub.topic': sub.topic, - 'hub.challenge': 'challenge', - 'hub.lease_seconds': 2000, - 'hub.verify_token': verify_token}) + sub = PushSubscription.objects.create(topic="topic", hub="hub", verified=False) + verify_token = sub.generate_token("subscribe") + + response = self.client.get( + reverse("pubsubhubbub_callback", args=(sub.pk,)), + { + "hub.mode": "subscribe", + "hub.topic": sub.topic, + "hub.challenge": "challenge", + "hub.lease_seconds": 2000, + "hub.verify_token": verify_token, + }, + ) self.assertEquals(response.status_code, 200) - self.assertEquals(response.content, 'challenge') + self.assertEquals(response.content, "challenge") sub = PushSubscription.objects.get(pk=sub.pk) self.assertEquals(sub.verified, True) self.assertEquals(len(self.signals), 1) - self.assertEquals(self.signals[0], (verified, {'sender': sub})) + self.assertEquals(self.signals[0], (verified, {"sender": sub})) def test_404(self): """ @@ -189,54 +186,63 @@ def test_404(self): * subscription doesn't exist * token doesn't match the subscription """ - sub = PushSubscription.objects.create( - topic='topic', - hub='hub', - verified=False) - verify_token = sub.generate_token('subscribe') - - response = self.client.get(reverse('pubsubhubbub_callback', - args=(0,)), - {'hub.mode': 'subscribe', - 'hub.topic': sub.topic, - 'hub.challenge': 'challenge', - 'hub.lease_seconds': 2000, - 'hub.verify_token': verify_token[1:]}) + sub = PushSubscription.objects.create(topic="topic", hub="hub", verified=False) + verify_token = sub.generate_token("subscribe") + + response = self.client.get( + reverse("pubsubhubbub_callback", args=(0,)), + { + "hub.mode": "subscribe", + "hub.topic": sub.topic, + "hub.challenge": "challenge", + "hub.lease_seconds": 2000, + "hub.verify_token": verify_token[1:], + }, + ) self.assertEquals(response.status_code, 404) self.assertEquals(len(self.signals), 0) - response = self.client.get(reverse('pubsubhubbub_callback', - args=(sub.pk,)), - {'hub.mode': 'subscribe', - 'hub.topic': sub.topic, - 'hub.challenge': 'challenge', - 'hub.lease_seconds': 2000, - 'hub.verify_token': verify_token[1:]}) + response = self.client.get( + reverse("pubsubhubbub_callback", args=(sub.pk,)), + { + "hub.mode": "subscribe", + "hub.topic": sub.topic, + "hub.challenge": "challenge", + "hub.lease_seconds": 2000, + "hub.verify_token": verify_token[1:], + }, + ) self.assertEquals(response.status_code, 404) self.assertEquals(len(self.signals), 0) - response = self.client.get(reverse('pubsubhubbub_callback', - args=(sub.pk,)), - {'hub.mode': 'subscribe', - 'hub.topic': sub.topic + 'extra', - 'hub.challenge': 'challenge', - 'hub.lease_seconds': 2000, - 'hub.verify_token': verify_token}) + response = self.client.get( + reverse("pubsubhubbub_callback", args=(sub.pk,)), + { + "hub.mode": "subscribe", + "hub.topic": sub.topic + "extra", + "hub.challenge": "challenge", + "hub.lease_seconds": 2000, + "hub.verify_token": verify_token, + }, + ) self.assertEquals(response.status_code, 404) self.assertEquals(len(self.signals), 0) - response = self.client.get(reverse('pubsubhubbub_callback', - args=(sub.pk,)), - {'hub.mode': 'subscribe', - 'hub.topic': sub.topic, - 'hub.challenge': 'challenge', - 'hub.lease_seconds': 2000, - 'hub.verify_token': verify_token[:-5]}) + response = self.client.get( + 
reverse("pubsubhubbub_callback", args=(sub.pk,)), + { + "hub.mode": "subscribe", + "hub.topic": sub.topic, + "hub.challenge": "challenge", + "hub.lease_seconds": 2000, + "hub.verify_token": verify_token[:-5], + }, + ) self.assertEquals(response.status_code, 404) self.assertEquals(len(self.signals), 0) -class Test_PSHBUpdateCase(PSHBTestBase, TestCase): +class Test_PSHBUpdateCase(PSHBTestBase, TestCase): def test_update(self): # this data comes from # http://pubsubhubbub.googlecode.com/svn/trunk/pubsubhubbub-core-0.1.html#anchor3 @@ -293,32 +299,27 @@ def test_update(self): """ sub = PushSubscription.objects.create( - hub="http://myhub.example.com/endpoint", - topic="http://publisher.example.com/happycats.xml") + hub="http://myhub.example.com/endpoint", topic="http://publisher.example.com/happycats.xml" + ) callback_data = [] updated.connect( - lambda sender=None, update=None, **kwargs: callback_data.append( - (sender, update)), - weak=False) + lambda sender=None, update=None, **kwargs: callback_data.append((sender, update)), weak=False + ) - response = self.client.post(reverse('pubsubhubbub_callback', - args=(sub.pk,)), - update_data, 'application/atom+xml') + response = self.client.post( + reverse("pubsubhubbub_callback", args=(sub.pk,)), update_data, "application/atom+xml" + ) self.assertEquals(response.status_code, 200) self.assertEquals(len(callback_data), 1) sender, update = callback_data[0] self.assertEquals(sender, sub) self.assertEquals(len(update.entries), 4) - self.assertEquals(update.entries[0].id, - 'http://publisher.example.com/happycat25.xml') - self.assertEquals(update.entries[1].id, - 'http://publisher.example.com/happycat25.xml') - self.assertEquals(update.entries[2].id, - 'http://publisher.example.com/happycat25.xml') - self.assertEquals(update.entries[3].id, - 'http://publisher.example.com/happycat25.xml') + self.assertEquals(update.entries[0].id, "http://publisher.example.com/happycat25.xml") + self.assertEquals(update.entries[1].id, "http://publisher.example.com/happycat25.xml") + self.assertEquals(update.entries[2].id, "http://publisher.example.com/happycat25.xml") + self.assertEquals(update.entries[3].id, "http://publisher.example.com/happycat25.xml") def test_update_with_changed_hub(self): update_data = """ @@ -343,31 +344,32 @@ def test_update_with_changed_hub(self): sub = PushSubscription.objects.create( hub="hub", topic="http://publisher.example.com/happycats.xml", - lease_expires=datetime.now() + timedelta(days=1)) + lease_expires=datetime.now() + timedelta(days=1), + ) callback_data = [] updated.connect( - lambda sender=None, update=None, **kwargs: callback_data.append( - (sender, update)), - weak=False) + lambda sender=None, update=None, **kwargs: callback_data.append((sender, update)), weak=False + ) self.responses.append(MockResponse(204)) - response = self.client.post(reverse('pubsubhubbub_callback', - args=(sub.pk,)), - update_data, 'application/atom+xml') + response = self.client.post( + reverse("pubsubhubbub_callback", args=(sub.pk,)), update_data, "application/atom+xml" + ) self.assertEquals(response.status_code, 200) self.assertEquals( PushSubscription.objects.filter( - hub='http://myhub.example.com/endpoint', - topic='http://publisher.example.com/happycats.xml', - verified=True).count(), 1) + hub="http://myhub.example.com/endpoint", + topic="http://publisher.example.com/happycats.xml", + verified=True, + ).count(), + 1, + ) self.assertEquals(len(self.requests), 1) - self.assertEquals(self.requests[0][0], - 'http://myhub.example.com/endpoint') - 
self.assertEquals(self.requests[0][1]['callback'], - 'http://test.nb.local.com/1/') - self.assert_((self.requests[0][1]['lease_seconds'] - 86400) < 5) + self.assertEquals(self.requests[0][0], "http://myhub.example.com/endpoint") + self.assertEquals(self.requests[0][1]["callback"], "http://test.nb.local.com/1/") + self.assert_((self.requests[0][1]["lease_seconds"] - 86400) < 5) def test_update_with_changed_self(self): update_data = """ @@ -392,30 +394,32 @@ def test_update_with_changed_self(self): sub = PushSubscription.objects.create( hub="http://myhub.example.com/endpoint", topic="topic", - lease_expires=datetime.now() + timedelta(days=1)) + lease_expires=datetime.now() + timedelta(days=1), + ) callback_data = [] updated.connect( - lambda sender=None, update=None, **kwargs: callback_data.append( - (sender, update)), - weak=False) + lambda sender=None, update=None, **kwargs: callback_data.append((sender, update)), weak=False + ) self.responses.append(MockResponse(204)) - response = self.client.post(reverse('pubsubhubbub_callback', kwargs={'push_id': sub.pk}), - update_data, 'application/atom+xml') + response = self.client.post( + reverse("pubsubhubbub_callback", kwargs={"push_id": sub.pk}), update_data, "application/atom+xml" + ) self.assertEquals(response.status_code, 200) self.assertEquals( PushSubscription.objects.filter( - hub='http://myhub.example.com/endpoint', - topic='http://publisher.example.com/happycats.xml', - verified=True).count(), 1) + hub="http://myhub.example.com/endpoint", + topic="http://publisher.example.com/happycats.xml", + verified=True, + ).count(), + 1, + ) self.assertEquals(len(self.requests), 1) - self.assertEquals(self.requests[0][0], - 'http://myhub.example.com/endpoint') - self.assertEquals(self.requests[0][1]['callback'], - 'http://test.nb.local.com/1/') - self.assert_((self.requests[0][1]['lease_seconds'] - 86400) < 5) + self.assertEquals(self.requests[0][0], "http://myhub.example.com/endpoint") + self.assertEquals(self.requests[0][1]["callback"], "http://test.nb.local.com/1/") + self.assert_((self.requests[0][1]["lease_seconds"] - 86400) < 5) def test_update_with_changed_hub_and_self(self): update_data = """ @@ -438,30 +442,29 @@ def test_update_with_changed_hub_and_self(self): """ sub = PushSubscription.objects.create( - hub="hub", - topic="topic", - lease_expires=datetime.now() + timedelta(days=1)) + hub="hub", topic="topic", lease_expires=datetime.now() + timedelta(days=1) + ) callback_data = [] updated.connect( - lambda sender=None, update=None, **kwargs: callback_data.append( - (sender, update)), - weak=False) + lambda sender=None, update=None, **kwargs: callback_data.append((sender, update)), weak=False + ) self.responses.append(MockResponse(204)) - response = self.client.post(reverse('pubsubhubbub_callback', - args=(sub.pk,)), - update_data, 'application/atom+xml') + response = self.client.post( + reverse("pubsubhubbub_callback", args=(sub.pk,)), update_data, "application/atom+xml" + ) self.assertEquals(response.status_code, 200) self.assertEquals( PushSubscription.objects.filter( - hub='http://myhub.example.com/endpoint', - topic='http://publisher.example.com/happycats.xml', - verified=True).count(), 1) + hub="http://myhub.example.com/endpoint", + topic="http://publisher.example.com/happycats.xml", + verified=True, + ).count(), + 1, + ) self.assertEquals(len(self.requests), 1) - self.assertEquals(self.requests[0][0], - 'http://myhub.example.com/endpoint') - self.assertEquals(self.requests[0][1]['callback'], - 'http://test.nb.local.com/1/') - 
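With lease_expires set one day in the future, the lease the client re-advertises to the hub is just the remaining lifetime in seconds; the `(lease_seconds - 86400) < 5` assertions allow for the few seconds the test itself takes to run. A minimal sketch of that derivation (the exact computation inside the subscription model is not shown in these hunks):

    from datetime import datetime, timedelta

    lease_expires = datetime.now() + timedelta(days=1)
    # Remaining lease in whole seconds at re-subscribe time.
    lease_seconds = int((lease_expires - datetime.now()).total_seconds())

    # One day is 86400 s; the shortfall is only the test's own runtime.
    assert 0 <= 86400 - lease_seconds < 5
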
self.assert_((self.requests[0][1]['lease_seconds'] - 86400) < 5) + self.assertEquals(self.requests[0][0], "http://myhub.example.com/endpoint") + self.assertEquals(self.requests[0][1]["callback"], "http://test.nb.local.com/1/") + self.assert_((self.requests[0][1]["lease_seconds"] - 86400) < 5) diff --git a/apps/push/urls.py b/apps/push/urls.py index 223e40b290..08a4d2a46c 100644 --- a/apps/push/urls.py +++ b/apps/push/urls.py @@ -2,5 +2,5 @@ from apps.push import views urlpatterns = [ - url(r'^(?P\d+)/?$', views.push_callback, name='push-callback'), + url(r"^(?P\d+)/?$", views.push_callback, name="push-callback"), ] diff --git a/apps/push/views.py b/apps/push/views.py index 78460008d8..71451b80ed 100644 --- a/apps/push/views.py +++ b/apps/push/views.py @@ -13,43 +13,49 @@ from apps.rss_feeds.models import MFetchHistory from utils import log as logging + def push_callback(request, push_id): - if request.method == 'GET': - mode = request.GET['hub.mode'] - topic = request.GET['hub.topic'] - challenge = request.GET.get('hub.challenge', '') - lease_seconds = request.GET.get('hub.lease_seconds') - verify_token = request.GET.get('hub.verify_token', '') + if request.method == "GET": + mode = request.GET["hub.mode"] + topic = request.GET["hub.topic"] + challenge = request.GET.get("hub.challenge", "") + lease_seconds = request.GET.get("hub.lease_seconds") + verify_token = request.GET.get("hub.verify_token", "") - if mode == 'subscribe': - if not verify_token.startswith('subscribe'): + if mode == "subscribe": + if not verify_token.startswith("subscribe"): raise Http404 - subscription = get_object_or_404(PushSubscription, - pk=push_id, - topic=topic, - verify_token=verify_token) + subscription = get_object_or_404( + PushSubscription, pk=push_id, topic=topic, verify_token=verify_token + ) subscription.verified = True subscription.set_expiration(int(lease_seconds)) subscription.save() subscription.feed.setup_push() - logging.debug(' ---> [%-30s] [%s] ~BBVerified PuSH' % (subscription.feed, subscription.feed_id)) + logging.debug(" ---> [%-30s] [%s] ~BBVerified PuSH" % (subscription.feed, subscription.feed_id)) verified.send(sender=subscription) - return HttpResponse(challenge, content_type='text/plain') - elif request.method == 'POST': + return HttpResponse(challenge, content_type="text/plain") + elif request.method == "POST": subscription = get_object_or_404(PushSubscription, pk=push_id) fetch_history = MFetchHistory.feed(subscription.feed_id) latest_push_date_delta = None - if fetch_history and fetch_history.get('push_history'): - latest_push = fetch_history['push_history'][0]['push_date'] - latest_push_date = datetime.datetime.strptime(latest_push, '%Y-%m-%d %H:%M:%S') + if fetch_history and fetch_history.get("push_history"): + latest_push = fetch_history["push_history"][0]["push_date"] + latest_push_date = datetime.datetime.strptime(latest_push, "%Y-%m-%d %H:%M:%S") latest_push_date_delta = datetime.datetime.now() - latest_push_date if latest_push_date > datetime.datetime.now() - datetime.timedelta(minutes=1): - logging.debug(' ---> [%-30s] ~SN~FBSkipping feed fetch, pushed %s seconds ago' % (subscription.feed, latest_push_date_delta.seconds)) - return HttpResponse('Slow down, you just pushed %s seconds ago...' % latest_push_date_delta.seconds, status=429) - + logging.debug( + " ---> [%-30s] ~SN~FBSkipping feed fetch, pushed %s seconds ago" + % (subscription.feed, latest_push_date_delta.seconds) + ) + return HttpResponse( + "Slow down, you just pushed %s seconds ago..." 
% latest_push_date_delta.seconds, + status=429, + ) + # XXX TODO: Optimize this by removing feedparser. It just needs to find out # the hub_url or topic has changed. ElementTree could do it. if random.random() < 0.1: @@ -63,10 +69,12 @@ def push_callback(request, push_id): # subscription.feed.queue_pushed_feed_xml(request.body) if subscription.feed.active_subscribers >= 1: subscription.feed.queue_pushed_feed_xml("Fetch me", latest_push_date_delta=latest_push_date_delta) - MFetchHistory.add(feed_id=subscription.feed_id, - fetch_type='push') + MFetchHistory.add(feed_id=subscription.feed_id, fetch_type="push") else: - logging.debug(' ---> [%-30s] ~FBSkipping feed fetch, no actives: %s' % (subscription.feed, subscription.feed)) - - return HttpResponse('OK') + logging.debug( + " ---> [%-30s] ~FBSkipping feed fetch, no actives: %s" + % (subscription.feed, subscription.feed) + ) + + return HttpResponse("OK") return Http404 diff --git a/apps/reader/admin.py b/apps/reader/admin.py index 03daf35fc6..fca4f8acad 100644 --- a/apps/reader/admin.py +++ b/apps/reader/admin.py @@ -3,4 +3,4 @@ admin.site.register(UserSubscription) admin.site.register(UserSubscriptionFolders) -admin.site.register(Feature) \ No newline at end of file +admin.site.register(Feature) diff --git a/apps/reader/factories.py b/apps/reader/factories.py index ba7f22c221..edba6c2814 100644 --- a/apps/reader/factories.py +++ b/apps/reader/factories.py @@ -8,6 +8,7 @@ fake = Faker() + def generate_folder(): string = '{"' string += " ".join(fake.words(2)) @@ -18,12 +19,13 @@ def generate_folder(): string += "]}," return string + def generate_folders(): """ "folders": "[5299728, 644144, 1187026, {\"Brainiacs & Opinion\": [569, 38, 3581, 183139, 1186180, 15]}, {\"Science & Technology\": [731503, 140145, 1272495, 76, 161, 39, {\"Hacker\": [5985150, 3323431]}]}, {\"Humor\": [212379, 3530, 5994357]}, {\"Videos\": [3240, 5168]}]" """ string = '"folders":[' - + for _ in range(3): string += f"{fake.pyint()}, " for _ in range(3): @@ -32,6 +34,7 @@ def generate_folders(): string = string[:-1] + "]" return string + class UserSubscriptionFoldersFactory(DjangoModelFactory): user = factory.SubFactory(UserFactory) folders = FuzzyAttribute(generate_folders) @@ -39,18 +42,19 @@ class UserSubscriptionFoldersFactory(DjangoModelFactory): class Meta: model = UserSubscriptionFolders - + class UserSubscriptionFactory(DjangoModelFactory): user = factory.SubFactory(UserFactory) feed = FuzzyAttribute(FeedFactory) - last_read_date = factory.Faker('date_time') + last_read_date = factory.Faker("date_time") class Meta: model = UserSubscription class FeatureFactory(DjangoModelFactory): - description = factory.Faker('text') - date = factory.Faker('date_time') + description = factory.Faker("text") + date = factory.Faker("date_time") + class Meta: model = Feature diff --git a/apps/reader/forms.py b/apps/reader/forms.py index c0fe51d48a..a09cc04757 100644 --- a/apps/reader/forms.py +++ b/apps/reader/forms.py @@ -15,13 +15,18 @@ class LoginForm(forms.Form): - username = forms.CharField(label=_("Username or Email"), max_length=30, - widget=forms.TextInput(attrs={'tabindex': 1, 'class': 'NB-input'}), - error_messages={'required': 'Please enter a username.'}) - password = forms.CharField(label=_("Password"), - widget=forms.PasswordInput(attrs={'tabindex': 2, 'class': 'NB-input'}), - required=False) - # error_messages={'required': 'Please enter a password.'}) + username = forms.CharField( + label=_("Username or Email"), + max_length=30, + 
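The XXX TODO in push_callback above notes that feedparser is overkill when the handler only needs to learn whether the pushed payload's hub or self link changed; ElementTree can pull those two hrefs directly. A minimal sketch of that idea (the helper name and exact usage are ours, not part of this patch):

    import xml.etree.ElementTree as ET

    ATOM_NS = "{http://www.w3.org/2005/Atom}"

    def hub_and_self_links(body: bytes):
        # Return the rel="hub" and rel="self" hrefs from a pushed Atom
        # payload without parsing entries, authors, dates, and the rest.
        root = ET.fromstring(body)
        links = {link.get("rel"): link.get("href") for link in root.iter(ATOM_NS + "link")}
        return links.get("hub"), links.get("self")
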
widget=forms.TextInput(attrs={"tabindex": 1, "class": "NB-input"}), + error_messages={"required": "Please enter a username."}, + ) + password = forms.CharField( + label=_("Password"), + widget=forms.PasswordInput(attrs={"tabindex": 2, "class": "NB-input"}), + required=False, + ) + # error_messages={'required': 'Please enter a password.'}) add = forms.CharField(required=False, widget=forms.HiddenInput()) def __init__(self, *args, **kwargs): @@ -29,10 +34,10 @@ def __init__(self, *args, **kwargs): super(LoginForm, self).__init__(*args, **kwargs) def clean(self): - username = self.cleaned_data.get('username', '').lower() - password = self.cleaned_data.get('password', '') - - if '@' in username: + username = self.cleaned_data.get("username", "").lower() + password = self.cleaned_data.get("password", "") + + if "@" in username: user = User.objects.filter(email=username) if not user: user = User.objects.filter(email__iexact=username) @@ -60,13 +65,15 @@ def clean(self): if blank: email_user.set_password(email_user.username) email_user.save() - self.user_cache = authenticate(username=email_user.username, password=email_user.username) + self.user_cache = authenticate( + username=email_user.username, password=email_user.username + ) if self.user_cache is None: logging.info(" ***> [%s] Bad Login" % username) raise forms.ValidationError(_("Whoopsy-daisy, wrong password. Try again.")) elif username and not user: raise forms.ValidationError(_("That username is not registered. Please try again.")) - + return self.cleaned_data def get_user_id(self): @@ -81,113 +88,135 @@ def get_user(self): class SignupForm(forms.Form): use_required_attribute = False - username = forms.RegexField(regex=r'^\w+$', - max_length=30, - widget=forms.TextInput(attrs={'class': 'NB-input'}), - label=_('Username'), - error_messages={ - 'required': 'Please enter a username.', - 'invalid': "Your username may only contain letters and numbers." 
- }) - email = forms.EmailField(widget=forms.TextInput(attrs={'maxlength': 75, 'class': 'NB-input'}), - label=_('Email'), - required=True, - error_messages={'required': 'Please enter an email.'}) - password = forms.CharField(widget=forms.PasswordInput(attrs={'class': 'NB-input'}, - render_value=True,), - label=_('Password'), - required=False) - # error_messages={'required': 'Please enter a password.'}) - + username = forms.RegexField( + regex=r"^\w+$", + max_length=30, + widget=forms.TextInput(attrs={"class": "NB-input"}), + label=_("Username"), + error_messages={ + "required": "Please enter a username.", + "invalid": "Your username may only contain letters and numbers.", + }, + ) + email = forms.EmailField( + widget=forms.TextInput(attrs={"maxlength": 75, "class": "NB-input"}), + label=_("Email"), + required=True, + error_messages={"required": "Please enter an email."}, + ) + password = forms.CharField( + widget=forms.PasswordInput( + attrs={"class": "NB-input"}, + render_value=True, + ), + label=_("Password"), + required=False, + ) + # error_messages={'required': 'Please enter a password.'}) + def clean_username(self): - username = self.cleaned_data['username'] + username = self.cleaned_data["username"] return username def clean_password(self): - if not self.cleaned_data['password']: + if not self.cleaned_data["password"]: return "" - return self.cleaned_data['password'] - + return self.cleaned_data["password"] + def clean_email(self): - email = self.cleaned_data.get('email', None) + email = self.cleaned_data.get("email", None) if email: email_exists = User.objects.filter(email__iexact=email).count() if email_exists: - raise forms.ValidationError(_('Someone is already using that email address.')) - if any([banned in email for banned in ['mailwire24', 'mailbox9', 'scintillamail', 'bluemailboxes', 'devmailing']]): - logging.info(" ***> [%s] Spammer signup banned: %s/%s" % (self.cleaned_data.get('username', None), self.cleaned_data.get('password', None), email)) - raise forms.ValidationError('Seriously, fuck off spammer.') + raise forms.ValidationError(_("Someone is already using that email address.")) + if any( + [ + banned in email + for banned in ["mailwire24", "mailbox9", "scintillamail", "bluemailboxes", "devmailing"] + ] + ): + logging.info( + " ***> [%s] Spammer signup banned: %s/%s" + % ( + self.cleaned_data.get("username", None), + self.cleaned_data.get("password", None), + email, + ) + ) + raise forms.ValidationError("Seriously, fuck off spammer.") try: - domain = email.rsplit('@', 1)[-1] - if not query(domain, 'MX'): - raise forms.ValidationError('Sorry, that email is invalid.') + domain = email.rsplit("@", 1)[-1] + if not query(domain, "MX"): + raise forms.ValidationError("Sorry, that email is invalid.") except (NXDOMAIN, NoNameservers, NoAnswer): - raise forms.ValidationError('Sorry, that email is invalid.') + raise forms.ValidationError("Sorry, that email is invalid.") except NoResolverConfiguration as e: logging.info(f" ***> ~FRFailed to check spamminess of domain: ~FY{domain} ~FR{e}") pass - return self.cleaned_data['email'] - + return self.cleaned_data["email"] + def clean(self): - username = self.cleaned_data.get('username', '') - password = self.cleaned_data.get('password', '') - email = self.cleaned_data.get('email', None) - + username = self.cleaned_data.get("username", "") + password = self.cleaned_data.get("password", "") + email = self.cleaned_data.get("email", None) + exists = User.objects.filter(username__iexact=username).count() if exists: user_auth = 
authenticate(username=username, password=password) if not user_auth: - raise forms.ValidationError(_('Someone is already using that username.')) - + raise forms.ValidationError(_("Someone is already using that username.")) + return self.cleaned_data - + def save(self, profile_callback=None): - username = self.cleaned_data['username'] - password = self.cleaned_data['password'] - email = self.cleaned_data['email'] + username = self.cleaned_data["username"] + password = self.cleaned_data["password"] + email = self.cleaned_data["email"] exists = User.objects.filter(username__iexact=username).count() if exists: user_auth = authenticate(username=username, password=password) if not user_auth: - raise forms.ValidationError(_('Someone is already using that username.')) + raise forms.ValidationError(_("Someone is already using that username.")) else: return user_auth - + if not password: password = username - + new_user = User(username=username) new_user.set_password(password) - if not getattr(settings, 'AUTO_ENABLE_NEW_USERS', True): + if not getattr(settings, "AUTO_ENABLE_NEW_USERS", True): new_user.is_active = False new_user.email = email new_user.last_login = datetime.datetime.now() new_user.save() - new_user = authenticate(username=username, - password=password) + new_user = authenticate(username=username, password=password) new_user = User.objects.get(username=username) MActivity.new_signup(user_id=new_user.pk) - + RNewUserQueue.add_user(new_user.pk) - + if new_user.email: EmailNewUser.delay(user_id=new_user.pk) - - if getattr(settings, 'AUTO_PREMIUM_NEW_USERS', False): + + if getattr(settings, "AUTO_PREMIUM_NEW_USERS", False): new_user.profile.activate_premium() - elif getattr(settings, 'AUTO_ENABLE_NEW_USERS', False): + elif getattr(settings, "AUTO_ENABLE_NEW_USERS", False): new_user.profile.activate_free() - + return new_user + class FeatureForm(forms.Form): use_required_attribute = False description = forms.CharField(required=True) - + def save(self): - feature = Feature(description=self.cleaned_data['description'], - date=datetime.datetime.utcnow() + datetime.timedelta(minutes=1)) + feature = Feature( + description=self.cleaned_data["description"], + date=datetime.datetime.utcnow() + datetime.timedelta(minutes=1), + ) feature.save() return feature diff --git a/apps/reader/http.py b/apps/reader/http.py index 6fee2f04ba..af0edbff0f 100644 --- a/apps/reader/http.py +++ b/apps/reader/http.py @@ -1,8 +1,9 @@ from django.shortcuts import render + def respond(request, template_name, context_dict, **kwargs): """ Use this function rather than render_to_response directly. The idea is to ensure that we're always using RequestContext. It's too easy to forget. 
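clean_email above also verifies that the address's domain can actually receive mail by asking DNS for an MX record, treating resolver failures as an invalid address. A self-contained sketch of that check (the form itself uses dnspython's older query name and imports the exception types from dns.resolver outside the hunks shown; dnspython 2.x renamed query to resolve):

    from dns.resolver import NXDOMAIN, NoAnswer, NoNameservers, resolve

    def domain_accepts_mail(domain: str) -> bool:
        # No MX record, or no usable answer at all, means nothing can
        # deliver mail there, so the signup email is treated as invalid.
        try:
            return bool(resolve(domain, "MX"))
        except (NXDOMAIN, NoAnswer, NoNameservers):
            return False
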
""" - return render(request, template_name, context_dict, **kwargs) \ No newline at end of file + return render(request, template_name, context_dict, **kwargs) diff --git a/apps/reader/managers.py b/apps/reader/managers.py index 697fda2876..adafc88fa6 100644 --- a/apps/reader/managers.py +++ b/apps/reader/managers.py @@ -4,32 +4,36 @@ from apps.rss_feeds.models import DuplicateFeed from utils import log as logging + class UserSubscriptionManager(models.Manager): def get(self, *args, **kwargs): try: return super(UserSubscriptionManager, self).get(*args, **kwargs) except self.model.DoesNotExist as exception: - if isinstance(kwargs.get('feed'), int): - feed_id = kwargs.get('feed') - elif 'feed' in kwargs: - feed_id = kwargs['feed'].pk - elif 'feed__pk' in kwargs: - feed_id = kwargs['feed__pk'] - elif 'feed_id' in kwargs: - feed_id = kwargs['feed_id'] + if isinstance(kwargs.get("feed"), int): + feed_id = kwargs.get("feed") + elif "feed" in kwargs: + feed_id = kwargs["feed"].pk + elif "feed__pk" in kwargs: + feed_id = kwargs["feed__pk"] + elif "feed_id" in kwargs: + feed_id = kwargs["feed_id"] dupe_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id) if dupe_feed: feed = dupe_feed[0].feed - if 'feed' in kwargs: - kwargs['feed'] = feed - elif 'feed__pk' in kwargs: - kwargs['feed__pk'] = feed.pk - elif 'feed_id' in kwargs: - kwargs['feed_id'] = feed.pk - user = kwargs.get('user') + if "feed" in kwargs: + kwargs["feed"] = feed + elif "feed__pk" in kwargs: + kwargs["feed__pk"] = feed.pk + elif "feed_id" in kwargs: + kwargs["feed_id"] = feed.pk + user = kwargs.get("user") if isinstance(user, int): user = User.objects.get(pk=user) - logging.debug(" ---> [%s] ~BRFound dupe UserSubscription: ~SB%s (%s)" % (user and user.username, feed, feed_id)) + logging.debug( + " ---> [%s] ~BRFound dupe UserSubscription: ~SB%s (%s)" + % (user and user.username, feed, feed_id) + ) return super(UserSubscriptionManager, self).get(*args, **kwargs) else: raise exception diff --git a/apps/reader/migrations/0001_initial.py b/apps/reader/migrations/0001_initial.py index da718c584b..61e86b9e50 100644 --- a/apps/reader/migrations/0001_initial.py +++ b/apps/reader/migrations/0001_initial.py @@ -8,60 +8,87 @@ class Migration(migrations.Migration): - initial = True dependencies = [ - ('rss_feeds', '0001_initial'), + ("rss_feeds", "0001_initial"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( - name='Feature', + name="Feature", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('description', models.TextField(default='')), - ('date', models.DateTimeField(default=datetime.datetime.now)), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("description", models.TextField(default="")), + ("date", models.DateTimeField(default=datetime.datetime.now)), ], options={ - 'ordering': ['-date'], + "ordering": ["-date"], }, ), migrations.CreateModel( - name='UserSubscription', + name="UserSubscription", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('user_title', models.CharField(blank=True, max_length=255, null=True)), - ('active', models.BooleanField(default=False)), - ('last_read_date', models.DateTimeField(default=apps.reader.models.unread_cutoff_default)), - ('mark_read_date', models.DateTimeField(default=apps.reader.models.unread_cutoff_default)), - ('unread_count_neutral', 
models.IntegerField(default=0)), - ('unread_count_positive', models.IntegerField(default=0)), - ('unread_count_negative', models.IntegerField(default=0)), - ('unread_count_updated', models.DateTimeField(default=datetime.datetime.now)), - ('oldest_unread_story_date', models.DateTimeField(default=datetime.datetime.now)), - ('needs_unread_recalc', models.BooleanField(default=False)), - ('feed_opens', models.IntegerField(default=0)), - ('is_trained', models.BooleanField(default=False)), - ('feed', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='subscribers', to='rss_feeds.Feed')), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='subscriptions', to=settings.AUTH_USER_MODEL)), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("user_title", models.CharField(blank=True, max_length=255, null=True)), + ("active", models.BooleanField(default=False)), + ("last_read_date", models.DateTimeField(default=apps.reader.models.unread_cutoff_default)), + ("mark_read_date", models.DateTimeField(default=apps.reader.models.unread_cutoff_default)), + ("unread_count_neutral", models.IntegerField(default=0)), + ("unread_count_positive", models.IntegerField(default=0)), + ("unread_count_negative", models.IntegerField(default=0)), + ("unread_count_updated", models.DateTimeField(default=datetime.datetime.now)), + ("oldest_unread_story_date", models.DateTimeField(default=datetime.datetime.now)), + ("needs_unread_recalc", models.BooleanField(default=False)), + ("feed_opens", models.IntegerField(default=0)), + ("is_trained", models.BooleanField(default=False)), + ( + "feed", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="subscribers", + to="rss_feeds.Feed", + ), + ), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="subscriptions", + to=settings.AUTH_USER_MODEL, + ), + ), ], ), migrations.CreateModel( - name='UserSubscriptionFolders', + name="UserSubscriptionFolders", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('folders', models.TextField(default='[]')), - ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("folders", models.TextField(default="[]")), + ( + "user", + models.OneToOneField( + on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL + ), + ), ], options={ - 'verbose_name': 'folder', - 'verbose_name_plural': 'folders', + "verbose_name": "folder", + "verbose_name_plural": "folders", }, ), migrations.AlterUniqueTogether( - name='usersubscription', - unique_together={('user', 'feed')}, + name="usersubscription", + unique_together={("user", "feed")}, ), ] diff --git a/apps/reader/models.py b/apps/reader/models.py index 3ff6f0d381..ca3d3b872b 100644 --- a/apps/reader/models.py +++ b/apps/reader/models.py @@ -22,24 +22,31 @@ from apps.rss_feeds.models import Feed, MStory, DuplicateFeed from apps.rss_feeds.tasks import NewFeeds from apps.analyzer.models import MClassifierFeed, MClassifierAuthor, MClassifierTag, MClassifierTitle -from apps.analyzer.models import apply_classifier_titles, apply_classifier_feeds, apply_classifier_authors, apply_classifier_tags +from apps.analyzer.models import ( + apply_classifier_titles, + apply_classifier_feeds, + 
apply_classifier_authors, + apply_classifier_tags, +) from apps.analyzer.tfidf import tfidf from utils.feed_functions import add_object_to_folder, chunks + def unread_cutoff_default(): return datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD) - + + class UserSubscription(models.Model): """ A feed which a user has subscribed to. Carries all of the cached information about the subscription, including unread counts of the three primary scores. - + Also has a dirty flag (needs_unread_recalc) which means that the unread counts are not accurate and need to be calculated with `self.calculate_feed_scores()`. """ - - user = models.ForeignKey(User, related_name='subscriptions', on_delete=models.CASCADE) - feed = models.ForeignKey(Feed, related_name='subscribers', on_delete=models.CASCADE) + + user = models.ForeignKey(User, related_name="subscriptions", on_delete=models.CASCADE) + feed = models.ForeignKey(Feed, related_name="subscribers", on_delete=models.CASCADE) user_title = models.CharField(max_length=255, null=True, blank=True) active = models.BooleanField(default=False) last_read_date = models.DateTimeField(default=unread_cutoff_default) @@ -52,32 +59,31 @@ class UserSubscription(models.Model): needs_unread_recalc = models.BooleanField(default=False) feed_opens = models.IntegerField(default=0) is_trained = models.BooleanField(default=False) - + objects = UserSubscriptionManager() def __str__(self): - return '[%s (%s): %s (%s)] ' % (self.user.username, self.user.pk, - self.feed.feed_title, self.feed.pk) - + return "[%s (%s): %s (%s)] " % (self.user.username, self.user.pk, self.feed.feed_title, self.feed.pk) + class Meta: unique_together = ("user", "feed") - + def canonical(self, full=False, include_favicon=True, classifiers=None): - feed = self.feed.canonical(full=full, include_favicon=include_favicon) - feed['feed_title'] = self.user_title or feed['feed_title'] - feed['ps'] = self.unread_count_positive - feed['nt'] = self.unread_count_neutral - feed['ng'] = self.unread_count_negative - feed['active'] = self.active - feed['feed_opens'] = self.feed_opens - feed['subscribed'] = True + feed = self.feed.canonical(full=full, include_favicon=include_favicon) + feed["feed_title"] = self.user_title or feed["feed_title"] + feed["ps"] = self.unread_count_positive + feed["nt"] = self.unread_count_neutral + feed["ng"] = self.unread_count_negative + feed["active"] = self.active + feed["feed_opens"] = self.feed_opens + feed["subscribed"] = True if classifiers: - feed['classifiers'] = classifiers + feed["classifiers"] = classifiers return feed - + def save(self, *args, **kwargs): - user_title_max = self._meta.get_field('user_title').max_length + user_title_max = self._meta.get_field("user_title").max_length if self.user_title and len(self.user_title) > user_title_max: self.user_title = self.user_title[:user_title_max] try: @@ -91,37 +97,50 @@ def save(self, *args, **kwargs): super(UserSubscription, self).save(*args, **kwargs) break else: - if self and self.id: self.delete() - + if self and self.id: + self.delete() + @classmethod def subs_for_feeds(cls, user_id, feed_ids=None, read_filter="unread"): usersubs = cls.objects if read_filter == "unread": - usersubs = usersubs.filter(Q(unread_count_neutral__gt=0) | - Q(unread_count_positive__gt=0)) + usersubs = usersubs.filter(Q(unread_count_neutral__gt=0) | Q(unread_count_positive__gt=0)) if not feed_ids: - usersubs = usersubs.filter(user=user_id, - active=True).only('feed', 'mark_read_date', 'is_trained', 'needs_unread_recalc') + usersubs 
= usersubs.filter(user=user_id, active=True).only( + "feed", "mark_read_date", "is_trained", "needs_unread_recalc" + ) else: - usersubs = usersubs.filter(user=user_id, - active=True, - feed__in=feed_ids).only('feed', 'mark_read_date', 'is_trained', 'needs_unread_recalc') - + usersubs = usersubs.filter(user=user_id, active=True, feed__in=feed_ids).only( + "feed", "mark_read_date", "is_trained", "needs_unread_recalc" + ) + return usersubs - + @classmethod - def story_hashes(cls, user_id, feed_ids=None, usersubs=None, read_filter="unread", order="newest", - include_timestamps=False, group_by_feed=False, cutoff_date=None, - across_all_feeds=True, store_stories_key=None, offset=0, limit=500): + def story_hashes( + cls, + user_id, + feed_ids=None, + usersubs=None, + read_filter="unread", + order="newest", + include_timestamps=False, + group_by_feed=False, + cutoff_date=None, + across_all_feeds=True, + store_stories_key=None, + offset=0, + limit=500, + ): r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) pipeline = r.pipeline() user = User.objects.get(pk=user_id) story_hashes = {} if group_by_feed else [] is_archive = user.profile.is_archive - + if not feed_ids and not across_all_feeds: return story_hashes - + if not usersubs: usersubs = cls.subs_for_feeds(user_id, feed_ids=feed_ids, read_filter=read_filter) if not usersubs: @@ -130,12 +149,12 @@ def story_hashes(cls, user_id, feed_ids=None, usersubs=None, read_filter="unread if not feed_ids: return story_hashes - current_time = int(time.time() + 60*60*24) + current_time = int(time.time() + 60 * 60 * 24) if not cutoff_date: cutoff_date = user.profile.unread_cutoff feed_counter = 0 unread_ranked_stories_keys = [] - + read_dates = dict() needs_unread_recalc = dict() manual_unread_pipeline = r.pipeline() @@ -143,9 +162,9 @@ def story_hashes(cls, user_id, feed_ids=None, usersubs=None, read_filter="unread oldest_manual_unread = None # usersub_count = len(usersubs) for us in usersubs: - read_dates[us.feed_id] = int(max(us.mark_read_date, cutoff_date).strftime('%s')) + read_dates[us.feed_id] = int(max(us.mark_read_date, cutoff_date).strftime("%s")) if read_filter == "unread": - needs_unread_recalc[us.feed_id] = us.needs_unread_recalc # or usersub_count == 1 + needs_unread_recalc[us.feed_id] = us.needs_unread_recalc # or usersub_count == 1 user_manual_unread_stories_feed_key = f"uU:{user_id}:{us.feed_id}" manual_unread_pipeline.exists(user_manual_unread_stories_feed_key) user_unread_ranked_stories_key = f"zU:{user_id}:{us.feed_id}" @@ -153,25 +172,27 @@ def story_hashes(cls, user_id, feed_ids=None, usersubs=None, read_filter="unread if read_filter == "unread": results = manual_unread_pipeline.execute() for i, us in enumerate(usersubs): - if results[i*2]: # user_manual_unread_stories_feed_key + if results[i * 2]: # user_manual_unread_stories_feed_key user_manual_unread_stories_feed_key = f"uU:{user_id}:{us.feed_id}" - oldest_manual_unread = r.zrevrange(user_manual_unread_stories_feed_key, -1, -1, withscores=True) + oldest_manual_unread = r.zrevrange( + user_manual_unread_stories_feed_key, -1, -1, withscores=True + ) manual_unread_feed_oldest_date[us.feed_id] = int(oldest_manual_unread[0][1]) - if read_filter == "unread" and not results[i*2+1]: # user_unread_ranked_stories_key + if read_filter == "unread" and not results[i * 2 + 1]: # user_unread_ranked_stories_key needs_unread_recalc[us.feed_id] = True - + for feed_id_group in chunks(feed_ids, 500): pipeline = r.pipeline() for feed_id in feed_id_group: - stories_key = 'F:%s' % feed_id - 
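The Redis key scheme these hunks rename is the heart of story_hashes: F:<feed> and zF:<feed> hold the feed's story hashes (as a plain set and as a date-scored sorted set), RS:<user>:<feed> holds what the user has read, and zU:<user>:<feed> caches the unread result. The unread set is literally the sorted-set difference the needs_unread_recalc branch computes with ZDIFFSTORE. A minimal standalone sketch (requires Redis 6.2+ for ZDIFFSTORE; key names follow the scheme here):

    import redis

    def recalc_unread(r: redis.Redis, user_id: int, feed_id: int,
                      min_score: int, max_score: int):
        zf = f"zF:{feed_id}"            # all stories, scored by date
        rs = f"RS:{user_id}:{feed_id}"  # hashes this user has read
        zu = f"zU:{user_id}:{feed_id}"  # cached unread sorted set
        r.zdiffstore(zu, [zf, rs])
        r.expire(zu, 60 * 60)  # kept for an hour, as in the code here
        # Newest-first page of unread hashes inside the date window.
        return r.zrevrangebyscore(zu, max_score, min_score)
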
sorted_stories_key = 'zF:%s' % feed_id - read_stories_key = 'RS:%s:%s' % (user_id, feed_id) - unread_stories_key = 'U:%s:%s' % (user_id, feed_id) - unread_ranked_stories_key = 'zU:%s:%s' % (user_id, feed_id) + stories_key = "F:%s" % feed_id + sorted_stories_key = "zF:%s" % feed_id + read_stories_key = "RS:%s:%s" % (user_id, feed_id) + unread_stories_key = "U:%s:%s" % (user_id, feed_id) + unread_ranked_stories_key = "zU:%s:%s" % (user_id, feed_id) user_manual_unread_stories_feed_key = f"uU:{user_id}:{feed_id}" - + max_score = current_time - if read_filter == 'unread': + if read_filter == "unread": min_score = read_dates[feed_id] # if needs_unread_recalc[feed_id]: # pipeline.sdiffstore(unread_stories_key, stories_key, read_stories_key) @@ -180,53 +201,73 @@ def story_hashes(cls, user_id, feed_ids=None, usersubs=None, read_filter="unread else: min_score = 0 - if order == 'oldest': + if order == "oldest": byscorefunc = pipeline.zrangebyscore else: byscorefunc = pipeline.zrevrangebyscore min_score, max_score = max_score, min_score ranked_stories_key = unread_ranked_stories_key - if read_filter == 'unread': + if read_filter == "unread": if needs_unread_recalc[feed_id]: pipeline.zdiffstore(unread_ranked_stories_key, [sorted_stories_key, read_stories_key]) # pipeline.expire(unread_ranked_stories_key, unread_cutoff_diff.days*24*60*60) - pipeline.expire(unread_ranked_stories_key, 1*60*60) # 1 hours - if order == 'oldest': - pipeline.zremrangebyscore(ranked_stories_key, 0, min_score-1) - pipeline.zremrangebyscore(ranked_stories_key, max_score+1, 2*max_score) + pipeline.expire(unread_ranked_stories_key, 1 * 60 * 60) # 1 hours + if order == "oldest": + pipeline.zremrangebyscore(ranked_stories_key, 0, min_score - 1) + pipeline.zremrangebyscore(ranked_stories_key, max_score + 1, 2 * max_score) else: - pipeline.zremrangebyscore(ranked_stories_key, 0, max_score-1) - pipeline.zremrangebyscore(ranked_stories_key, min_score+1, 2*min_score) + pipeline.zremrangebyscore(ranked_stories_key, 0, max_score - 1) + pipeline.zremrangebyscore(ranked_stories_key, min_score + 1, 2 * min_score) else: ranked_stories_key = sorted_stories_key - + # If archive premium user has manually marked an older story as unread if is_archive and feed_id in manual_unread_feed_oldest_date and read_filter == "unread": - if order == 'oldest': + if order == "oldest": min_score = manual_unread_feed_oldest_date[feed_id] else: max_score = manual_unread_feed_oldest_date[feed_id] - - pipeline.zunionstore(unread_ranked_stories_key, [unread_ranked_stories_key, user_manual_unread_stories_feed_key], aggregate="MAX") - + + pipeline.zunionstore( + unread_ranked_stories_key, + [unread_ranked_stories_key, user_manual_unread_stories_feed_key], + aggregate="MAX", + ) + if settings.DEBUG and False: debug_stories = r.zrevrange(unread_ranked_stories_key, 0, -1, withscores=True) - print((" ---> Story hashes (%s/%s - %s/%s) %s stories: %s" % ( - min_score, datetime.datetime.fromtimestamp(min_score).strftime('%Y-%m-%d %T'), - max_score, datetime.datetime.fromtimestamp(max_score).strftime('%Y-%m-%d %T'), - len(debug_stories), - debug_stories))) + print( + ( + " ---> Story hashes (%s/%s - %s/%s) %s stories: %s" + % ( + min_score, + datetime.datetime.fromtimestamp(min_score).strftime("%Y-%m-%d %T"), + max_score, + datetime.datetime.fromtimestamp(max_score).strftime("%Y-%m-%d %T"), + len(debug_stories), + debug_stories, + ) + ) + ) if not store_stories_key: - byscorefunc(ranked_stories_key, min_score, max_score, withscores=include_timestamps, start=offset, num=limit) 
+ byscorefunc( + ranked_stories_key, + min_score, + max_score, + withscores=include_timestamps, + start=offset, + num=limit, + ) unread_ranked_stories_keys.append(ranked_stories_key) - + results = pipeline.execute() if not store_stories_key: for hashes in results: - if not isinstance(hashes, list): continue + if not isinstance(hashes, list): + continue if group_by_feed: story_hashes[feed_ids[feed_counter]] = hashes feed_counter += 1 @@ -241,10 +282,18 @@ def story_hashes(cls, user_id, feed_ids=None, usersubs=None, read_filter="unread else: pipeline = r.pipeline() for unread_ranked_stories_keys_group in chunks(unread_ranked_stories_keys, chunk_size): - pipeline.zunionstore(f"{store_stories_key}-chunk{chunk_count}", unread_ranked_stories_keys_group, aggregate="MAX") + pipeline.zunionstore( + f"{store_stories_key}-chunk{chunk_count}", + unread_ranked_stories_keys_group, + aggregate="MAX", + ) chunk_count += 1 pipeline.execute() - r.zunionstore(store_stories_key, [f"{store_stories_key}-chunk{i}" for i in range(chunk_count)], aggregate="MAX") + r.zunionstore( + store_stories_key, + [f"{store_stories_key}-chunk{i}" for i in range(chunk_count)], + aggregate="MAX", + ) pipeline = r.pipeline() for i in range(chunk_count): pipeline.delete(f"{store_stories_key}-chunk{i}") @@ -252,39 +301,54 @@ def story_hashes(cls, user_id, feed_ids=None, usersubs=None, read_filter="unread if not store_stories_key: return story_hashes - - def get_stories(self, offset=0, limit=6, order='newest', read_filter='all', cutoff_date=None): + + def get_stories(self, offset=0, limit=6, order="newest", read_filter="all", cutoff_date=None): r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) - unread_ranked_stories_key = 'zU:%s:%s' % (self.user_id, self.feed_id) + unread_ranked_stories_key = "zU:%s:%s" % (self.user_id, self.feed_id) if offset and r.exists(unread_ranked_stories_key): byscorefunc = r.zrevrange if order == "oldest": byscorefunc = r.zrange - story_hashes = byscorefunc(unread_ranked_stories_key, start=offset, end=offset+limit)[:limit] + story_hashes = byscorefunc(unread_ranked_stories_key, start=offset, end=offset + limit)[:limit] else: - story_hashes = UserSubscription.story_hashes(self.user.pk, feed_ids=[self.feed.pk], - order=order, read_filter=read_filter, - offset=offset, limit=limit, - cutoff_date=cutoff_date) - - story_date_order = "%sstory_date" % ('' if order == 'oldest' else '-') + story_hashes = UserSubscription.story_hashes( + self.user.pk, + feed_ids=[self.feed.pk], + order=order, + read_filter=read_filter, + offset=offset, + limit=limit, + cutoff_date=cutoff_date, + ) + + story_date_order = "%sstory_date" % ("" if order == "oldest" else "-") mstories = MStory.objects(story_hash__in=story_hashes).order_by(story_date_order) stories = Feed.format_stories(mstories) return stories - + @classmethod - def feed_stories(cls, user_id, feed_ids=None, offset=0, limit=6, - order='newest', read_filter='all', usersubs=None, cutoff_date=None, - all_feed_ids=None, cache_prefix=""): + def feed_stories( + cls, + user_id, + feed_ids=None, + offset=0, + limit=6, + order="newest", + read_filter="all", + usersubs=None, + cutoff_date=None, + all_feed_ids=None, + cache_prefix="", + ): rt = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) across_all_feeds = False - - if order == 'oldest': + + if order == "oldest": range_func = rt.zrange else: range_func = rt.zrevrange - + if feed_ids is None: across_all_feeds = True feed_ids = [] @@ -292,17 +356,17 @@ def feed_stories(cls, user_id, feed_ids=None, offset=0, 
limit=6, all_feed_ids = [f for f in feed_ids] # feeds_string = "" - feeds_string = ','.join(str(f) for f in sorted(all_feed_ids))[:30] - ranked_stories_keys = '%szU:%s:feeds:%s' % (cache_prefix, user_id, feeds_string) - unread_ranked_stories_keys = '%szhU:%s:feeds:%s' % (cache_prefix, user_id, feeds_string) + feeds_string = ",".join(str(f) for f in sorted(all_feed_ids))[:30] + ranked_stories_keys = "%szU:%s:feeds:%s" % (cache_prefix, user_id, feeds_string) + unread_ranked_stories_keys = "%szhU:%s:feeds:%s" % (cache_prefix, user_id, feeds_string) stories_cached = rt.exists(ranked_stories_keys) unreads_cached = True if read_filter == "unread" else rt.exists(unread_ranked_stories_keys) if offset and stories_cached: - story_hashes = range_func(ranked_stories_keys, offset, offset+limit) + story_hashes = range_func(ranked_stories_keys, offset, offset + limit) if read_filter == "unread": unread_story_hashes = story_hashes elif unreads_cached: - unread_story_hashes = range_func(unread_ranked_stories_keys, 0, offset+limit) + unread_story_hashes = range_func(unread_ranked_stories_keys, 0, offset + limit) else: unread_story_hashes = [] return story_hashes, unread_story_hashes @@ -310,47 +374,55 @@ def feed_stories(cls, user_id, feed_ids=None, offset=0, limit=6, rt.delete(ranked_stories_keys) rt.delete(unread_ranked_stories_keys) - cls.story_hashes(user_id, feed_ids=feed_ids, - read_filter=read_filter, order=order, - include_timestamps=False, - usersubs=usersubs, - cutoff_date=cutoff_date, - across_all_feeds=across_all_feeds, - store_stories_key=ranked_stories_keys) + cls.story_hashes( + user_id, + feed_ids=feed_ids, + read_filter=read_filter, + order=order, + include_timestamps=False, + usersubs=usersubs, + cutoff_date=cutoff_date, + across_all_feeds=across_all_feeds, + store_stories_key=ranked_stories_keys, + ) story_hashes = range_func(ranked_stories_keys, offset, limit) if read_filter == "unread": unread_feed_story_hashes = story_hashes rt.zunionstore(unread_ranked_stories_keys, [ranked_stories_keys]) else: - cls.story_hashes(user_id, feed_ids=feed_ids, - read_filter="unread", order=order, - include_timestamps=True, - cutoff_date=cutoff_date, - store_stories_key=unread_ranked_stories_keys) + cls.story_hashes( + user_id, + feed_ids=feed_ids, + read_filter="unread", + order=order, + include_timestamps=True, + cutoff_date=cutoff_date, + store_stories_key=unread_ranked_stories_keys, + ) unread_feed_story_hashes = range_func(unread_ranked_stories_keys, offset, limit) - - rt.expire(ranked_stories_keys, 60*60) - rt.expire(unread_ranked_stories_keys, 60*60) - + + rt.expire(ranked_stories_keys, 60 * 60) + rt.expire(unread_ranked_stories_keys, 60 * 60) + return story_hashes, unread_feed_story_hashes - + def oldest_manual_unread_story_date(self, r=None): if not r: r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) - + user_manual_unread_stories_feed_key = f"uU:{self.user_id}:{self.feed_id}" oldest_manual_unread = r.zrevrange(user_manual_unread_stories_feed_key, -1, -1, withscores=True) - + return oldest_manual_unread - + @classmethod def truncate_river(cls, user_id, feed_ids, read_filter, cache_prefix=""): rt = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_TEMP_POOL) - - feeds_string = ','.join(str(f) for f in sorted(feed_ids))[:30] - ranked_stories_keys = '%szU:%s:feeds:%s' % (cache_prefix, user_id, feeds_string) - unread_ranked_stories_keys = '%szhU:%s:feeds:%s' % (cache_prefix, user_id, feeds_string) + + feeds_string = ",".join(str(f) for f in sorted(feed_ids))[:30] + 
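When story_hashes is asked to store its result (store_stories_key), it unions the per-feed sorted sets in chunks rather than passing every source key to a single ZUNIONSTORE, then unions the chunk results into the destination and deletes the temporaries. A standalone sketch of that pattern (a chunk size of 1000 is an assumption; the real chunk_size is set outside the hunks shown):

    import redis

    def union_in_chunks(r: redis.Redis, dest: str, keys: list, chunk_size: int = 1000):
        # Union each slice of source keys into a temporary chunk key...
        chunk_keys = []
        for i, pos in enumerate(range(0, len(keys), chunk_size)):
            chunk_key = f"{dest}-chunk{i}"
            r.zunionstore(chunk_key, keys[pos:pos + chunk_size], aggregate="MAX")
            chunk_keys.append(chunk_key)
        # ...then union the chunks into the destination and clean up.
        r.zunionstore(dest, chunk_keys, aggregate="MAX")
        for chunk_key in chunk_keys:
            r.delete(chunk_key)
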
ranked_stories_keys = "%szU:%s:feeds:%s" % (cache_prefix, user_id, feeds_string) + unread_ranked_stories_keys = "%szhU:%s:feeds:%s" % (cache_prefix, user_id, feeds_string) stories_cached = rt.exists(ranked_stories_keys) unreads_cached = rt.exists(unread_ranked_stories_keys) truncated = 0 @@ -359,27 +431,31 @@ def truncate_river(cls, user_id, feed_ids, read_filter, cache_prefix=""): rt.delete(ranked_stories_keys) # else: # logging.debug(" ***> ~FRNo stories cached, can't truncate: %s / %s" % (User.objects.get(pk=user_id), feed_ids)) - + if unreads_cached: truncated += rt.zcard(unread_ranked_stories_keys) rt.delete(unread_ranked_stories_keys) # else: # logging.debug(" ***> ~FRNo unread stories cached, can't truncate: %s / %s" % (User.objects.get(pk=user_id), feed_ids)) - + return truncated - + @classmethod - def add_subscription(cls, user, feed_address, folder=None, bookmarklet=False, auto_active=True, - skip_fetch=False): + def add_subscription( + cls, user, feed_address, folder=None, bookmarklet=False, auto_active=True, skip_fetch=False + ): feed = None us = None - - logging.user(user, "~FRAdding URL: ~SB%s (in %s) %s" % (feed_address, folder, - "~FCAUTO-ADD" if not auto_active else "")) - + + logging.user( + user, + "~FRAdding URL: ~SB%s (in %s) %s" + % (feed_address, folder, "~FCAUTO-ADD" if not auto_active else ""), + ) + feed = Feed.get_feed_from_url(feed_address, user=user) - if not feed: + if not feed: code = -1 if bookmarklet: message = "This site does not have an RSS feed. Nothing is linked to from this page." @@ -387,20 +463,19 @@ def add_subscription(cls, user, feed_address, folder=None, bookmarklet=False, au message = "This address does not point to an RSS feed or a website with an RSS feed." else: us, subscription_created = cls.objects.get_or_create( - feed=feed, + feed=feed, user=user, defaults={ - 'needs_unread_recalc': True, - 'active': auto_active, - } + "needs_unread_recalc": True, + "active": auto_active, + }, ) code = 1 message = "" - + if us: user_sub_folders_object, created = UserSubscriptionFolders.objects.get_or_create( - user=user, - defaults={'folders': '[]'} + user=user, defaults={"folders": "[]"} ) if created: user_sub_folders = [] @@ -409,85 +484,88 @@ def add_subscription(cls, user, feed_address, folder=None, bookmarklet=False, au user_sub_folders = add_object_to_folder(feed.pk, folder, user_sub_folders) user_sub_folders_object.folders = json.encode(user_sub_folders) user_sub_folders_object.save() - + if auto_active or user.profile.is_premium: us.active = True us.save() - + if not skip_fetch and feed.last_update < datetime.datetime.utcnow() - datetime.timedelta(days=1): feed = feed.update(verbose=True) - + from apps.social.models import MActivity + MActivity.new_feed_subscription(user_id=user.pk, feed_id=feed.pk, feed_title=feed.title) - + feed.setup_feed_for_premium_subscribers() feed.count_subscribers() - + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(user.username, 'reload:feeds') - - + r.publish(user.username, "reload:feeds") + return code, message, us - + @classmethod def feeds_with_updated_counts(cls, user, feed_ids=None, check_fetch_status=False, force=False): feeds = {} silent = not getattr(settings, "TEST_DEBUG", False) - + # Get subscriptions for user - user_subs = cls.objects.select_related('feed').filter(user=user, active=True) - feed_ids = [f for f in feed_ids if f and not any(f.startswith(prefix) for prefix in ['river', 'saved'])] + user_subs = cls.objects.select_related("feed").filter(user=user, active=True) + feed_ids 
= [ + f for f in feed_ids if f and not any(f.startswith(prefix) for prefix in ["river", "saved"]) + ] if feed_ids: user_subs = user_subs.filter(feed__in=feed_ids) - + for i, sub in enumerate(user_subs): # Count unreads if subscription is stale. - if (force or - sub.needs_unread_recalc or - sub.unread_count_updated < user.profile.unread_cutoff or - sub.oldest_unread_story_date < user.profile.unread_cutoff): + if ( + force + or sub.needs_unread_recalc + or sub.unread_count_updated < user.profile.unread_cutoff + or sub.oldest_unread_story_date < user.profile.unread_cutoff + ): sub = sub.calculate_feed_scores(silent=silent, force=force) - if not sub: continue # TODO: Figure out the correct sub and give it a new feed_id + if not sub: + continue # TODO: Figure out the correct sub and give it a new feed_id feed_id = sub.feed_id feeds[feed_id] = { - 'ps': sub.unread_count_positive, - 'nt': sub.unread_count_neutral, - 'ng': sub.unread_count_negative, - 'id': feed_id, + "ps": sub.unread_count_positive, + "nt": sub.unread_count_neutral, + "ng": sub.unread_count_negative, + "id": feed_id, } if not sub.feed.fetched_once or check_fetch_status: - feeds[feed_id]['fetched_once'] = sub.feed.fetched_once - feeds[feed_id]['not_yet_fetched'] = not sub.feed.fetched_once # Legacy. Dammit. + feeds[feed_id]["fetched_once"] = sub.feed.fetched_once + feeds[feed_id]["not_yet_fetched"] = not sub.feed.fetched_once # Legacy. Dammit. if sub.feed.favicon_fetching: - feeds[feed_id]['favicon_fetching'] = True + feeds[feed_id]["favicon_fetching"] = True if sub.feed.has_feed_exception or sub.feed.has_page_exception: - feeds[feed_id]['has_exception'] = True - feeds[feed_id]['exception_type'] = 'feed' if sub.feed.has_feed_exception else 'page' - feeds[feed_id]['feed_address'] = sub.feed.feed_address - feeds[feed_id]['exception_code'] = sub.feed.exception_code + feeds[feed_id]["has_exception"] = True + feeds[feed_id]["exception_type"] = "feed" if sub.feed.has_feed_exception else "page" + feeds[feed_id]["feed_address"] = sub.feed.feed_address + feeds[feed_id]["exception_code"] = sub.feed.exception_code return feeds - + @classmethod def queue_new_feeds(cls, user, new_feeds=None): if not isinstance(user, User): user = User.objects.get(pk=user) - + if not new_feeds: - new_feeds = cls.objects.filter(user=user, - feed__fetched_once=False, - active=True).values('feed_id') - new_feeds = list(set([f['feed_id'] for f in new_feeds])) - + new_feeds = cls.objects.filter(user=user, feed__fetched_once=False, active=True).values("feed_id") + new_feeds = list(set([f["feed_id"] for f in new_feeds])) + if not new_feeds: return - + logging.user(user, "~BB~FW~SBQueueing NewFeeds: ~FC(%s) %s" % (len(new_feeds), new_feeds)) size = 4 - for t in (new_feeds[pos:pos + size] for pos in range(0, len(new_feeds), size)): + for t in (new_feeds[pos : pos + size] for pos in range(0, len(new_feeds), size)): NewFeeds.apply_async(args=(t,), queue="new_feeds") - + @classmethod def refresh_stale_feeds(cls, user, exclude_new=False): if not isinstance(user, User): @@ -496,18 +574,21 @@ def refresh_stale_feeds(cls, user, exclude_new=False): stale_cutoff = datetime.datetime.now() - datetime.timedelta(days=settings.SUBSCRIBER_EXPIRE) # TODO: Refactor below using last_update from REDIS_FEED_UPDATE_POOL - stale_feeds = UserSubscription.objects.filter(user=user, active=True, feed__last_update__lte=stale_cutoff) + stale_feeds = UserSubscription.objects.filter( + user=user, active=True, feed__last_update__lte=stale_cutoff + ) if exclude_new: stale_feeds = 
stale_feeds.filter(feed__fetched_once=True) - all_feeds = UserSubscription.objects.filter(user=user, active=True) - - logging.user(user, "~FG~BBRefreshing stale feeds: ~SB%s/%s" % ( - stale_feeds.count(), all_feeds.count())) + all_feeds = UserSubscription.objects.filter(user=user, active=True) + + logging.user( + user, "~FG~BBRefreshing stale feeds: ~SB%s/%s" % (stale_feeds.count(), all_feeds.count()) + ) for sub in stale_feeds: sub.feed.fetched_once = False sub.feed.save() - + if stale_feeds: stale_feeds = list(set([f.feed_id for f in stale_feeds])) cls.queue_new_feeds(user, new_feeds=stale_feeds) @@ -515,10 +596,13 @@ def refresh_stale_feeds(cls, user, exclude_new=False): @classmethod def schedule_fetch_archive_feeds_for_user(cls, user_id): from apps.profile.tasks import FetchArchiveFeedsForUser - FetchArchiveFeedsForUser.apply_async(kwargs=dict(user_id=user_id), - queue='search_indexer', - time_limit=settings.MAX_SECONDS_COMPLETE_ARCHIVE_FETCH) - + + FetchArchiveFeedsForUser.apply_async( + kwargs=dict(user_id=user_id), + queue="search_indexer", + time_limit=settings.MAX_SECONDS_COMPLETE_ARCHIVE_FETCH, + ) + # Should be run as a background task @classmethod def fetch_archive_feeds_for_user(cls, user_id): @@ -527,12 +611,11 @@ def fetch_archive_feeds_for_user(cls, user_id): start_time = time.time() user = User.objects.get(pk=user_id) r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(user.username, 'fetch_archive:start') + r.publish(user.username, "fetch_archive:start") - subscriptions = UserSubscription.objects.filter(user=user).only('feed') + subscriptions = UserSubscription.objects.filter(user=user).only("feed") total = subscriptions.count() - feed_ids = [] starting_story_count = 0 for sub in subscriptions: @@ -541,25 +624,31 @@ def fetch_archive_feeds_for_user(cls, user_id): except Feed.DoesNotExist: continue starting_story_count += MStory.objects(story_feed_id=sub.feed.pk).count() - + feed_id_chunks = [c for c in chunks(feed_ids, 1)] - logging.user(user, "~FCFetching archive stories from ~SB%s feeds~SN in %s chunks..." % - (total, len(feed_id_chunks))) - - search_chunks = [FetchArchiveFeedsChunk.s(feed_ids=feed_id_chunk, - user_id=user_id - ).set(queue='search_indexer') - .set(time_limit=settings.MAX_SECONDS_ARCHIVE_FETCH_SINGLE_FEED, - soft_time_limit=settings.MAX_SECONDS_ARCHIVE_FETCH_SINGLE_FEED-30) - for feed_id_chunk in feed_id_chunks] - callback = FinishFetchArchiveFeeds.s(user_id=user_id, - start_time=start_time, - starting_story_count=starting_story_count).set(queue='search_indexer') + logging.user( + user, + "~FCFetching archive stories from ~SB%s feeds~SN in %s chunks..." 
% (total, len(feed_id_chunks)), + ) + + search_chunks = [ + FetchArchiveFeedsChunk.s(feed_ids=feed_id_chunk, user_id=user_id) + .set(queue="search_indexer") + .set( + time_limit=settings.MAX_SECONDS_ARCHIVE_FETCH_SINGLE_FEED, + soft_time_limit=settings.MAX_SECONDS_ARCHIVE_FETCH_SINGLE_FEED - 30, + ) + for feed_id_chunk in feed_id_chunks + ] + callback = FinishFetchArchiveFeeds.s( + user_id=user_id, start_time=start_time, starting_story_count=starting_story_count + ).set(queue="search_indexer") celery.chord(search_chunks)(callback) @classmethod def fetch_archive_feeds_chunk(cls, user_id, feed_ids): from apps.rss_feeds.models import Feed + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) user = User.objects.get(pk=user_id) @@ -567,18 +656,18 @@ def fetch_archive_feeds_chunk(cls, user_id, feed_ids): for feed_id in feed_ids: feed = Feed.get_by_id(feed_id) - if not feed: continue - + if not feed: + continue + feed.fill_out_archive_stories() - - r.publish(user.username, 'fetch_archive:feeds:%s' % - ','.join([str(f) for f in feed_ids])) + + r.publish(user.username, "fetch_archive:feeds:%s" % ",".join([str(f) for f in feed_ids])) @classmethod def finish_fetch_archive_feeds(cls, user_id, start_time, starting_story_count): r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) user = User.objects.get(pk=user_id) - subscriptions = UserSubscription.objects.filter(user=user).only('feed') + subscriptions = UserSubscription.objects.filter(user=user).only("feed") total = subscriptions.count() duration = time.time() - start_time @@ -592,46 +681,52 @@ def finish_fetch_archive_feeds(cls, user_id, start_time, starting_story_count): continue new_story_count = ending_story_count - starting_story_count - logging.user(user, f"~FCFinished archive feed fetches for ~SB~FG{subscriptions.count()} feeds~FC~SN: ~FG~SB{new_story_count:,} new~SB~FC, ~FG{ending_story_count:,} total (pre-archive: {pre_archive_count:,} stories)") - - logging.user(user, "~FCFetched archive stories from ~SB%s feeds~SN in ~FM~SB%s~FC~SN sec." % - (total, round(duration, 2))) - r.publish(user.username, 'fetch_archive:done') + logging.user( + user, + f"~FCFinished archive feed fetches for ~SB~FG{subscriptions.count()} feeds~FC~SN: ~FG~SB{new_story_count:,} new~SB~FC, ~FG{ending_story_count:,} total (pre-archive: {pre_archive_count:,} stories)", + ) + + logging.user( + user, + "~FCFetched archive stories from ~SB%s feeds~SN in ~FM~SB%s~FC~SN sec." 
+ % (total, round(duration, 2)), + ) + r.publish(user.username, "fetch_archive:done") return ending_story_count, min(pre_archive_count, starting_story_count) - - + @classmethod def identify_deleted_feed_users(cls, old_feed_id): - users = UserSubscriptionFolders.objects.filter(folders__contains=old_feed_id).only('user') + users = UserSubscriptionFolders.objects.filter(folders__contains=old_feed_id).only("user") user_ids = [usf.user_id for usf in users] - f = open('utils/backups/users.txt', 'w') - f.write('\n'.join([str(u) for u in user_ids])) + f = open("utils/backups/users.txt", "w") + f.write("\n".join([str(u) for u in user_ids])) return user_ids @classmethod def recreate_deleted_feed(cls, new_feed_id, old_feed_id=None, skip=0): - user_ids = sorted([int(u) for u in open('utils/backups/users.txt').read().split('\n') if u]) - + user_ids = sorted([int(u) for u in open("utils/backups/users.txt").read().split("\n") if u]) + count = len(user_ids) - + for i, user_id in enumerate(user_ids): - if i < skip: continue + if i < skip: + continue if i % 1000 == 0: print("\n\n ------------------------------------------------") - print("\n ---> %s/%s (%s%%)" % (i, count, round(float(i)/count))) + print("\n ---> %s/%s (%s%%)" % (i, count, round(float(i) / count))) print("\n ------------------------------------------------\n") try: user = User.objects.get(pk=user_id) except User.DoesNotExist: print(" ***> %s has no account" % user_id) continue - us, created = UserSubscription.objects.get_or_create(user_id=user_id, feed_id=new_feed_id, defaults={ - 'needs_unread_recalc': True, - 'active': True, - 'is_trained': True - }) + us, created = UserSubscription.objects.get_or_create( + user_id=user_id, + feed_id=new_feed_id, + defaults={"needs_unread_recalc": True, "active": True, "is_trained": True}, + ) if not created: print(" ***> %s already subscribed" % user.username) try: @@ -639,7 +734,7 @@ def recreate_deleted_feed(cls, new_feed_id, old_feed_id=None, skip=0): usf.add_missing_feeds() except UserSubscriptionFolders.DoesNotExist: print(" ***> %s has no USF" % user.username) - + # Move classifiers if old_feed_id: classifier_count = 0 @@ -654,26 +749,30 @@ def recreate_deleted_feed(cls, new_feed_id, old_feed_id=None, skip=0): continue if classifier_count: print(" Moved %s classifiers for %s" % (classifier_count, user.username)) - + def trim_read_stories(self, r=None): if not r: r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) - + read_stories_key = "RS:%s:%s" % (self.user_id, self.feed_id) stale_story_hashes = r.sdiff(read_stories_key, "F:%s" % self.feed_id) if not stale_story_hashes: return - - logging.user(self.user, "~FBTrimming ~FR%s~FB read stories (~SB%s~SN)..." % (len(stale_story_hashes), self.feed_id)) + + logging.user( + self.user, + "~FBTrimming ~FR%s~FB read stories (~SB%s~SN)..." 
% (len(stale_story_hashes), self.feed_id), + ) r.srem(read_stories_key, *stale_story_hashes) r.srem("RS:%s" % self.feed_id, *stale_story_hashes) - + @classmethod def trim_user_read_stories(self, user_id): user = User.objects.get(pk=user_id) r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) - subs = UserSubscription.objects.filter(user_id=user_id).only('feed') - if not subs: return + subs = UserSubscription.objects.filter(user_id=user_id).only("feed") + if not subs: + return key = "RS:%s" % user_id feeds = [f.feed_id for f in subs] @@ -687,10 +786,10 @@ def trim_user_read_stories(self, user_id): # r.expire("%s:backup" % key, 60*60*24) r.sunionstore(key, *["%s:%s" % (key, f) for f in feeds]) new_rs = r.smembers(key) - + missing_rs = [] missing_count = 0 - feed_re = re.compile(r'(\d+):.*?') + feed_re = re.compile(r"(\d+):.*?") for i, rs in enumerate(old_rs): if i and i % 1000 == 0: if missing_rs: @@ -704,47 +803,56 @@ def trim_user_read_stories(self, user_id): rs_feed_id = found.groups()[0] if int(rs_feed_id) not in feeds: missing_rs.append(rs) - + if missing_rs: r.sadd(key, *missing_rs) - missing_count += len(missing_rs) + missing_count += len(missing_rs) new_count = len(new_rs) new_total = new_count + missing_count - logging.user(user, "~FBTrimming ~FR%s~FB/%s (~SB%s sub'ed ~SN+ ~SB%s unsub'ed~SN saved)" % - (old_count - new_total, old_count, new_count, missing_count)) - - + logging.user( + user, + "~FBTrimming ~FR%s~FB/%s (~SB%s sub'ed ~SN+ ~SB%s unsub'ed~SN saved)" + % (old_count - new_total, old_count, new_count, missing_count), + ) + def mark_feed_read(self, cutoff_date=None): - if (self.unread_count_negative == 0 + if ( + self.unread_count_negative == 0 and self.unread_count_neutral == 0 and self.unread_count_positive == 0 - and not self.needs_unread_recalc): + and not self.needs_unread_recalc + ): return - + recount = True # Use the latest story to get last read time. 
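        # A rough sketch of the cutoff this block derives (dates are hypothetical):
        #   explicit cutoff_date passed in      -> cutoff_date + 1 second
        #   no cutoff, newest story at 14:00:00 -> 14:00:01, so the newest story is included
        #   feed has no stories at all          -> datetime.utcnow(), and the recount below
        #                                          is skipped since there is nothing to score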
if cutoff_date: cutoff_date = cutoff_date + datetime.timedelta(seconds=1) else: now = datetime.datetime.now() - latest_story = MStory.objects(story_feed_id=self.feed.pk, - story_date__lte=now)\ - .order_by('-story_date').only('story_date').limit(1) + latest_story = ( + MStory.objects(story_feed_id=self.feed.pk, story_date__lte=now) + .order_by("-story_date") + .only("story_date") + .limit(1) + ) if latest_story and len(latest_story) >= 1: - cutoff_date = (latest_story[0]['story_date'] - + datetime.timedelta(seconds=1)) + cutoff_date = latest_story[0]["story_date"] + datetime.timedelta(seconds=1) else: cutoff_date = datetime.datetime.utcnow() recount = False - + if cutoff_date > self.mark_read_date or cutoff_date > self.oldest_unread_story_date: self.last_read_date = cutoff_date self.mark_read_date = cutoff_date self.oldest_unread_story_date = cutoff_date else: - logging.user(self.user, "Not marking %s as read: %s > %s/%s" % - (self, cutoff_date, self.mark_read_date, self.oldest_unread_story_date)) - + logging.user( + self.user, + "Not marking %s as read: %s > %s/%s" + % (self, cutoff_date, self.mark_read_date, self.oldest_unread_story_date), + ) + if not recount: self.unread_count_negative = 0 self.unread_count_positive = 0 @@ -753,58 +861,63 @@ def mark_feed_read(self, cutoff_date=None): self.needs_unread_recalc = False else: self.needs_unread_recalc = True - + self.save() - + return True - + def mark_newer_stories_read(self, cutoff_date): - if (self.unread_count_negative == 0 + if ( + self.unread_count_negative == 0 and self.unread_count_neutral == 0 and self.unread_count_positive == 0 - and not self.needs_unread_recalc): + and not self.needs_unread_recalc + ): return - + cutoff_date = cutoff_date - datetime.timedelta(seconds=1) - story_hashes = UserSubscription.story_hashes(self.user.pk, feed_ids=[self.feed.pk], - order="newest", read_filter="unread", - cutoff_date=cutoff_date) + story_hashes = UserSubscription.story_hashes( + self.user.pk, + feed_ids=[self.feed.pk], + order="newest", + read_filter="unread", + cutoff_date=cutoff_date, + ) data = self.mark_story_ids_as_read(story_hashes, aggregated=True) return data - - + def mark_story_ids_as_read(self, story_hashes, request=None, aggregated=False): data = dict(code=0, payload=story_hashes) r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - + if not request: request = self.user - + if not self.needs_unread_recalc: self.needs_unread_recalc = True - self.save(update_fields=['needs_unread_recalc']) - + self.save(update_fields=["needs_unread_recalc"]) + if len(story_hashes) > 1: logging.user(request, "~FYRead %s stories in feed: %s" % (len(story_hashes), self.feed)) else: logging.user(request, "~FYRead story (%s) in feed: %s" % (story_hashes, self.feed)) RUserStory.aggregate_mark_read(self.feed_id) - - for story_hash in set(story_hashes): + + for story_hash in set(story_hashes): # logging.user(request, "~FYRead story: %s" % (story_hash)) RUserStory.mark_read(self.user_id, self.feed_id, story_hash, aggregated=aggregated) - r.publish(self.user.username, 'story:read:%s' % story_hash) + r.publish(self.user.username, "story:read:%s" % story_hash) if self.user.profile.is_archive: RUserUnreadStory.mark_read(self.user_id, story_hash) - r.publish(self.user.username, 'feed:%s' % self.feed_id) - + r.publish(self.user.username, "feed:%s" % self.feed_id) + self.last_read_date = datetime.datetime.now() - self.save(update_fields=['last_read_date']) - + self.save(update_fields=["last_read_date"]) + return data - + def 
invert_read_stories_after_unread_story(self, story, request=None): data = dict(code=1) unread_cutoff = self.user.profile.unread_cutoff @@ -820,33 +933,32 @@ def invert_read_stories_after_unread_story(self, story, request=None): story_hash=story.story_hash, story_date=story.story_date, ) - data['story_hashes'] = [story.story_hash] + data["story_hashes"] = [story.story_hash] return data - + # Story is outside the mark as read range, so invert all stories before. - newer_stories = MStory.objects(story_feed_id=story.story_feed_id, - story_date__gte=story.story_date, - story_date__lte=unread_cutoff - ).only('story_hash') + newer_stories = MStory.objects( + story_feed_id=story.story_feed_id, story_date__gte=story.story_date, story_date__lte=unread_cutoff + ).only("story_hash") newer_stories = [s.story_hash for s in newer_stories] self.mark_read_date = story.story_date - datetime.timedelta(minutes=1) self.needs_unread_recalc = True self.save() - + # Mark stories as read only after the mark_read_date has been moved, otherwise # these would be ignored. data = self.mark_story_ids_as_read(newer_stories, request=request, aggregated=True) - + return data - + def calculate_feed_scores(self, silent=False, stories=None, force=False): # now = datetime.datetime.strptime("2009-07-06 22:30:03", "%Y-%m-%d %H:%M:%S") now = datetime.datetime.now() oldest_unread_story_date = now - + if self.user.profile.last_seen_on < self.user.profile.unread_cutoff and not force: if not silent and settings.DEBUG: - logging.info(' ---> [%s] SKIPPING Computing scores: %s (1 week+)' % (self.user, self.feed)) + logging.info(" ---> [%s] SKIPPING Computing scores: %s (1 week+)" % (self.user, self.feed)) return self ong = self.unread_count_negative ont = self.unread_count_neutral @@ -855,32 +967,35 @@ def calculate_feed_scores(self, silent=False, stories=None, force=False): ucu = self.unread_count_updated onur = self.needs_unread_recalc oit = self.is_trained - + # if not self.feed.fetched_once: # if not silent: # logging.info(' ---> [%s] NOT Computing scores: %s' % (self.user, self.feed)) # self.needs_unread_recalc = False # self.save() # return - + feed_scores = dict(negative=0, neutral=0, positive=0) - + # Two weeks in age. If mark_read_date is older, mark old stories as read. 
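        # Sketch of the choice below, assuming unread_cutoff = now - 14 days (illustrative only):
        #   mark_read_date = now - 3 days   ->  date_delta = mark_read_date
        #       (the feed was read recently; don't rescore stories older than that)
        #   mark_read_date = now - 30 days  ->  date_delta = unread_cutoff, and
        #       mark_read_date is advanced to the cutoff so older stories fall away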
date_delta = self.user.profile.unread_cutoff if date_delta < self.mark_read_date: date_delta = self.mark_read_date else: self.mark_read_date = date_delta - + if self.is_trained: if not stories: - stories = cache.get('S:v3:%s' % self.feed_id) - - unread_story_hashes = self.story_hashes(user_id=self.user_id, feed_ids=[self.feed_id], - usersubs=[self], - read_filter='unread', - cutoff_date=self.user.profile.unread_cutoff) - + stories = cache.get("S:v3:%s" % self.feed_id) + + unread_story_hashes = self.story_hashes( + user_id=self.user_id, + feed_ids=[self.feed_id], + usersubs=[self], + read_filter="unread", + cutoff_date=self.user.profile.unread_cutoff, + ) + if not stories: try: stories_db = MStory.objects(story_hash__in=unread_story_hashes) @@ -891,112 +1006,144 @@ def calculate_feed_scores(self, silent=False, stories=None, force=False): except pymongo.errors.OperationFailure as e: stories_db = MStory.objects(story_hash__in=unread_story_hashes)[:25] stories = Feed.format_stories(stories_db, self.feed_id) - + unread_stories = [] for story in stories: # if story['story_date'] < date_delta: # continue - if story['story_hash'] in unread_story_hashes: + if story["story_hash"] in unread_story_hashes: unread_stories.append(story) - if story['story_date'] < oldest_unread_story_date: - oldest_unread_story_date = story['story_date'] + if story["story_date"] < oldest_unread_story_date: + oldest_unread_story_date = story["story_date"] # if not silent: # logging.info(' ---> [%s] Format stories: %s' % (self.user, datetime.datetime.now() - now)) - - classifier_feeds = list(MClassifierFeed.objects(user_id=self.user_id, feed_id=self.feed_id, social_user_id=0)) + + classifier_feeds = list( + MClassifierFeed.objects(user_id=self.user_id, feed_id=self.feed_id, social_user_id=0) + ) classifier_authors = list(MClassifierAuthor.objects(user_id=self.user_id, feed_id=self.feed_id)) - classifier_titles = list(MClassifierTitle.objects(user_id=self.user_id, feed_id=self.feed_id)) - classifier_tags = list(MClassifierTag.objects(user_id=self.user_id, feed_id=self.feed_id)) - - if (not len(classifier_feeds) and - not len(classifier_authors) and - not len(classifier_titles) and - not len(classifier_tags)): + classifier_titles = list(MClassifierTitle.objects(user_id=self.user_id, feed_id=self.feed_id)) + classifier_tags = list(MClassifierTag.objects(user_id=self.user_id, feed_id=self.feed_id)) + + if ( + not len(classifier_feeds) + and not len(classifier_authors) + and not len(classifier_titles) + and not len(classifier_tags) + ): self.is_trained = False - + # if not silent: # logging.info(' ---> [%s] Classifiers: %s (%s)' % (self.user, datetime.datetime.now() - now, classifier_feeds.count() + classifier_authors.count() + classifier_tags.count() + classifier_titles.count())) - + scores = { - 'feed': apply_classifier_feeds(classifier_feeds, self.feed), + "feed": apply_classifier_feeds(classifier_feeds, self.feed), } - + for story in unread_stories: - scores.update({ - 'author' : apply_classifier_authors(classifier_authors, story), - 'tags' : apply_classifier_tags(classifier_tags, story), - 'title' : apply_classifier_titles(classifier_titles, story), - }) - - max_score = max(scores['author'], scores['tags'], scores['title']) - min_score = min(scores['author'], scores['tags'], scores['title']) + scores.update( + { + "author": apply_classifier_authors(classifier_authors, story), + "tags": apply_classifier_tags(classifier_tags, story), + "title": apply_classifier_titles(classifier_titles, story), + } + ) + + max_score = 
max(scores["author"], scores["tags"], scores["title"]) + min_score = min(scores["author"], scores["tags"], scores["title"]) if max_score > 0: - feed_scores['positive'] += 1 + feed_scores["positive"] += 1 elif min_score < 0: - feed_scores['negative'] += 1 + feed_scores["negative"] += 1 else: - if scores['feed'] > 0: - feed_scores['positive'] += 1 - elif scores['feed'] < 0: - feed_scores['negative'] += 1 + if scores["feed"] > 0: + feed_scores["positive"] += 1 + elif scores["feed"] < 0: + feed_scores["negative"] += 1 else: - feed_scores['neutral'] += 1 + feed_scores["neutral"] += 1 else: - unread_story_hashes = self.story_hashes(user_id=self.user_id, feed_ids=[self.feed_id], - usersubs=[self], - read_filter='unread', - include_timestamps=True, - cutoff_date=date_delta) - - feed_scores['neutral'] = len(unread_story_hashes) - if feed_scores['neutral']: + unread_story_hashes = self.story_hashes( + user_id=self.user_id, + feed_ids=[self.feed_id], + usersubs=[self], + read_filter="unread", + include_timestamps=True, + cutoff_date=date_delta, + ) + + feed_scores["neutral"] = len(unread_story_hashes) + if feed_scores["neutral"]: oldest_unread_story_date = datetime.datetime.fromtimestamp(unread_story_hashes[-1][1]) - + if not silent or settings.DEBUG: - logging.user(self.user, '~FBUnread count (~SB%s~SN%s): ~SN(~FC%s~FB/~FC%s~FB/~FC%s~FB) ~SBto~SN (~FC%s~FB/~FC%s~FB/~FC%s~FB)' % (self.feed_id, '/~FMtrained~FB' if self.is_trained else '', ong, ont, ops, feed_scores['negative'], feed_scores['neutral'], feed_scores['positive'])) + logging.user( + self.user, + "~FBUnread count (~SB%s~SN%s): ~SN(~FC%s~FB/~FC%s~FB/~FC%s~FB) ~SBto~SN (~FC%s~FB/~FC%s~FB/~FC%s~FB)" + % ( + self.feed_id, + "/~FMtrained~FB" if self.is_trained else "", + ong, + ont, + ops, + feed_scores["negative"], + feed_scores["neutral"], + feed_scores["positive"], + ), + ) - self.unread_count_positive = feed_scores['positive'] - self.unread_count_neutral = feed_scores['neutral'] - self.unread_count_negative = feed_scores['negative'] + self.unread_count_positive = feed_scores["positive"] + self.unread_count_neutral = feed_scores["neutral"] + self.unread_count_negative = feed_scores["negative"] self.unread_count_updated = datetime.datetime.now() self.oldest_unread_story_date = oldest_unread_story_date self.needs_unread_recalc = False - + update_fields = [] - if self.unread_count_positive != ops: update_fields.append('unread_count_positive') - if self.unread_count_neutral != ont: update_fields.append('unread_count_neutral') - if self.unread_count_negative != ong: update_fields.append('unread_count_negative') - if self.unread_count_updated != ucu: update_fields.append('unread_count_updated') - if self.oldest_unread_story_date != oousd: update_fields.append('oldest_unread_story_date') - if self.needs_unread_recalc != onur: update_fields.append('needs_unread_recalc') - if self.is_trained != oit: update_fields.append('is_trained') + if self.unread_count_positive != ops: + update_fields.append("unread_count_positive") + if self.unread_count_neutral != ont: + update_fields.append("unread_count_neutral") + if self.unread_count_negative != ong: + update_fields.append("unread_count_negative") + if self.unread_count_updated != ucu: + update_fields.append("unread_count_updated") + if self.oldest_unread_story_date != oousd: + update_fields.append("oldest_unread_story_date") + if self.needs_unread_recalc != onur: + update_fields.append("needs_unread_recalc") + if self.is_trained != oit: + update_fields.append("is_trained") if len(update_fields): 
self.save(update_fields=update_fields) - - if (self.unread_count_positive == 0 and - self.unread_count_neutral == 0): + + if self.unread_count_positive == 0 and self.unread_count_neutral == 0: self.mark_feed_read() - + if not silent: - logging.user(self.user, '~FC~SNComputing scores: %s (~SB%s~SN/~SB%s~SN/~SB%s~SN)' % (self.feed, feed_scores['negative'], feed_scores['neutral'], feed_scores['positive'])) - + logging.user( + self.user, + "~FC~SNComputing scores: %s (~SB%s~SN/~SB%s~SN/~SB%s~SN)" + % (self.feed, feed_scores["negative"], feed_scores["neutral"], feed_scores["positive"]), + ) + self.trim_read_stories() - + return self - + @staticmethod def score_story(scores): - max_score = max(scores['author'], scores['tags'], scores['title']) - min_score = min(scores['author'], scores['tags'], scores['title']) + max_score = max(scores["author"], scores["tags"], scores["title"]) + min_score = min(scores["author"], scores["tags"], scores["title"]) if max_score > 0: return 1 elif min_score < 0: return -1 - return scores['feed'] - + return scores["feed"] + def switch_feed(self, new_feed, old_feed): # Rewrite feed in subscription folders try: @@ -1004,14 +1151,12 @@ def switch_feed(self, new_feed, old_feed): except Exception as e: logging.info(" *** ---> UserSubscriptionFolders error: %s" % e) return - + logging.info(" ===> %s " % self.user) # Switch read stories - RUserStory.switch_feed(user_id=self.user_id, old_feed_id=old_feed.pk, - new_feed_id=new_feed.pk) - RUserUnreadStory.switch_feed(user_id=self.user_id, old_feed_id=old_feed.pk, - new_feed_id=new_feed.pk) + RUserStory.switch_feed(user_id=self.user_id, old_feed_id=old_feed.pk, new_feed_id=new_feed.pk) + RUserUnreadStory.switch_feed(user_id=self.user_id, old_feed_id=old_feed.pk, new_feed_id=new_feed.pk) def switch_feed_for_classifier(model): duplicates = model.objects(feed_id=old_feed.pk, user_id=self.user_id) @@ -1027,7 +1172,7 @@ def switch_feed_for_classifier(model): except (IntegrityError, OperationError): logging.info(" !!!!> %s already exists" % duplicate) duplicate.delete() - + switch_feed_for_classifier(MClassifierTitle) switch_feed_for_classifier(MClassifierAuthor) switch_feed_for_classifier(MClassifierFeed) @@ -1046,7 +1191,7 @@ def switch_feed_for_classifier(model): logging.info(" !!!!> %s already subscribed" % self.user) self.delete() return - + @classmethod def collect_orphan_feeds(cls, user): us = cls.objects.filter(user=user) @@ -1056,7 +1201,7 @@ def collect_orphan_feeds(cls, user): return us_feed_ids = set([sub.feed_id for sub in us]) folders = json.decode(usf.folders) - + def collect_ids(folders, found_ids): for item in folders: # print ' --> %s' % item @@ -1071,10 +1216,14 @@ def collect_ids(folders, found_ids): found_ids.update(collect_ids(item, found_ids)) # print ' --> Returning: %s' % found_ids return found_ids + found_ids = collect_ids(folders, set()) diff = len(us_feed_ids) - len(found_ids) if diff > 0: - logging.info(" ---> Collecting orphans on %s. %s feeds with %s orphans" % (user.username, len(us_feed_ids), diff)) + logging.info( + " ---> Collecting orphans on %s. 
%s feeds with %s orphans" + % (user.username, len(us_feed_ids), diff) + ) orphan_ids = us_feed_ids - found_ids folders.extend(list(orphan_ids)) usf.folders = json.encode(folders) @@ -1092,7 +1241,7 @@ def all_subs_needs_unread_recalc(cls, user_id): needed_recalc += 1 logging.debug(f" ---> Relcaculated {needed_recalc} of {total} subscriptions for user_id: {user_id}") - + @classmethod def verify_feeds_scheduled(cls, user_id): r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) @@ -1102,13 +1251,13 @@ def verify_feeds_scheduled(cls, user_id): p = r.pipeline() for feed_id in feed_ids: - p.zscore('scheduled_updates', feed_id) - p.zscore('error_feeds', feed_id) + p.zscore("scheduled_updates", feed_id) + p.zscore("error_feeds", feed_id) results = p.execute() - + p = r.pipeline() for feed_id in feed_ids: - p.zscore('queued_feeds', feed_id) + p.zscore("queued_feeds", feed_id) try: results_queued = p.execute() except: @@ -1116,13 +1265,14 @@ def verify_feeds_scheduled(cls, user_id): safety_net = [] for f, feed_id in enumerate(feed_ids): - scheduled_updates = results[f*2] - error_feeds = results[f*2+1] + scheduled_updates = results[f * 2] + error_feeds = results[f * 2 + 1] queued_feeds = results_queued[f] if not scheduled_updates and not queued_feeds and not error_feeds: safety_net.append(feed_id) - if not safety_net: return + if not safety_net: + return logging.user(user, "~FBFound ~FR%s unscheduled feeds~FB, scheduling immediately..." % len(safety_net)) for feed_id in safety_net: @@ -1132,12 +1282,18 @@ def verify_feeds_scheduled(cls, user_id): @classmethod def count_subscribers_to_other_subscriptions(cls, feed_id): # feeds = defaultdict(int) - subscribing_users = cls.objects.filter(feed=feed_id).values('user', 'feed_opens').order_by('-feed_opens')[:25] + subscribing_users = ( + cls.objects.filter(feed=feed_id).values("user", "feed_opens").order_by("-feed_opens")[:25] + ) print("Got subscribing users") - subscribing_user_ids = [sub['user'] for sub in subscribing_users] + subscribing_user_ids = [sub["user"] for sub in subscribing_users] print("Got subscribing user ids") - cofeeds = cls.objects.filter(user__in=subscribing_user_ids).values('feed').annotate( - user_count=Count('user')).order_by('-user_count')[:200] + cofeeds = ( + cls.objects.filter(user__in=subscribing_user_ids) + .values("feed") + .annotate(user_count=Count("user")) + .order_by("-user_count")[:200] + ) print("Got cofeeds: %s" % len(cofeeds)) # feed_subscribers = Feed.objects.filter(pk__in=[f['feed'] for f in cofeeds]).values('pk', 'num_subscribers') # max_local_subscribers = float(max([f['user_count'] for f in cofeeds])) @@ -1155,24 +1311,25 @@ def count_subscribers_to_other_subscriptions(cls, feed_id): # pprint([(Feed.get_by_id(o[0]), o[1], o[2], o[3], o[4]) for o in orderedpctfeeds]) users_by_feeds = {} - for feed in [f['feed'] for f in cofeeds]: - users_by_feeds[feed] = [u['user'] for u in cls.objects.filter(feed=feed, user__in=subscribing_user_ids).values('user')] + for feed in [f["feed"] for f in cofeeds]: + users_by_feeds[feed] = [ + u["user"] for u in cls.objects.filter(feed=feed, user__in=subscribing_user_ids).values("user") + ] print("Got users_by_feeds") - + table = tfidf() for feed in list(users_by_feeds.keys()): table.addDocument(feed, users_by_feeds[feed]) print("Got table") - + sorted_table = sorted(table.similarities(subscribing_user_ids), key=itemgetter(1), reverse=True)[:8] pprint([(Feed.get_by_id(o[0]), o[1]) for o in sorted_table]) - + return table # return cofeeds class RUserStory: - @classmethod def 
mark_story_hashes_read(cls, user_id, story_hashes, username=None, r=None, s=None): if not r: @@ -1182,32 +1339,40 @@ def mark_story_hashes_read(cls, user_id, story_hashes, username=None, r=None, s= ps = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) if not username: username = User.objects.get(pk=user_id).username - + p = r.pipeline() feed_ids = set() friend_ids = set() - + if not isinstance(story_hashes, list): story_hashes = [story_hashes] - + single_story = len(story_hashes) == 1 - + for story_hash in story_hashes: feed_id, _ = MStory.split_story_hash(story_hash) feed_ids.add(feed_id) - + if single_story: cls.aggregate_mark_read(feed_id) - + # Find other social feeds with this story to update their counts friend_key = "F:%s:F" % (user_id) share_key = "S:%s" % (story_hash) friends_with_shares = [int(f) for f in s.sinter(share_key, friend_key)] friend_ids.update(friends_with_shares) - cls.mark_read(user_id, feed_id, story_hash, social_user_ids=friends_with_shares, r=p, username=username, ps=ps) - + cls.mark_read( + user_id, + feed_id, + story_hash, + social_user_ids=friends_with_shares, + r=p, + username=username, + ps=ps, + ) + p.execute() - + return list(feed_ids), list(friend_ids) @classmethod @@ -1218,7 +1383,7 @@ def mark_story_hash_unread(cls, user, story_hash, r=None, s=None, ps=None): s = redis.Redis(connection_pool=settings.REDIS_POOL) if not ps: ps = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - + friend_ids = set() feed_id, _ = MStory.split_story_hash(story_hash) @@ -1227,52 +1392,69 @@ def mark_story_hash_unread(cls, user, story_hash, r=None, s=None, ps=None): share_key = "S:%s" % (story_hash) friends_with_shares = [int(f) for f in s.sinter(share_key, friend_key)] friend_ids.update(friends_with_shares) - cls.mark_unread(user.pk, feed_id, story_hash, social_user_ids=friends_with_shares, r=r, - username=user.username, ps=ps) - + cls.mark_unread( + user.pk, + feed_id, + story_hash, + social_user_ids=friends_with_shares, + r=r, + username=user.username, + ps=ps, + ) + return feed_id, list(friend_ids) - + @classmethod def aggregate_mark_read(cls, feed_id): if not feed_id: logging.debug(" ***> ~BR~FWNo feed_id on aggregate mark read. 
Ignoring.") return - + r = redis.Redis(connection_pool=settings.REDIS_FEED_READ_POOL) - week_of_year = datetime.datetime.now().strftime('%Y-%U') + week_of_year = datetime.datetime.now().strftime("%Y-%U") feed_read_key = "fR:%s:%s" % (feed_id, week_of_year) - + r.incr(feed_read_key) # This settings.DAYS_OF_STORY_HASHES doesn't need to consider potential pro subscribers # because the feed_read_key is really only used for statistics and not unreads - r.expire(feed_read_key, 2*settings.DAYS_OF_STORY_HASHES*24*60*60) - + r.expire(feed_read_key, 2 * settings.DAYS_OF_STORY_HASHES * 24 * 60 * 60) + @classmethod - def mark_read(cls, user_id, story_feed_id, story_hash, social_user_ids=None, - aggregated=False, r=None, username=None, ps=None): + def mark_read( + cls, + user_id, + story_feed_id, + story_hash, + social_user_ids=None, + aggregated=False, + r=None, + username=None, + ps=None, + ): if not r: r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) - + story_hash = MStory.ensure_story_hash(story_hash, story_feed_id=story_feed_id) - if not story_hash: return - + if not story_hash: + return + def redis_commands(key): r.sadd(key, story_hash) - r.expire(key, Feed.days_of_story_hashes_for_feed(story_feed_id)*24*60*60) + r.expire(key, Feed.days_of_story_hashes_for_feed(story_feed_id) * 24 * 60 * 60) - all_read_stories_key = 'RS:%s' % (user_id) + all_read_stories_key = "RS:%s" % (user_id) redis_commands(all_read_stories_key) - - read_story_key = 'RS:%s:%s' % (user_id, story_feed_id) + + read_story_key = "RS:%s:%s" % (user_id, story_feed_id) redis_commands(read_story_key) - + if ps and username: - ps.publish(username, 'story:read:%s' % story_hash) - + ps.publish(username, "story:read:%s" % story_hash) + if social_user_ids: for social_user_id in social_user_ids: - social_read_story_key = 'RS:%s:B:%s' % (user_id, social_user_id) + social_read_story_key = "RS:%s:B:%s" % (user_id, social_user_id) redis_commands(social_read_story_key) feed_id, _ = MStory.split_story_hash(story_hash) @@ -1282,13 +1464,13 @@ def redis_commands(key): # unread_ranked_stories_key = f"zU:{user_id}:{story_feed_id}" # r.srem(unread_stories_key, story_hash) # r.zrem(unread_ranked_stories_key, story_hash) - + if not aggregated: - key = 'lRS:%s' % user_id + key = "lRS:%s" % user_id r.lpush(key, story_hash) r.ltrim(key, 0, 1000) - r.expire(key, Feed.days_of_story_hashes_for_feed(story_feed_id)*24*60*60) - + r.expire(key, Feed.days_of_story_hashes_for_feed(story_feed_id) * 24 * 60 * 60) + @staticmethod def story_can_be_marked_unread_by_user(story, user): message = None @@ -1297,44 +1479,51 @@ def story_can_be_marked_unread_by_user(story, user): # message = "Story is more than %s days old, change your days of unreads under Preferences." % ( # user.profile.days_of_unread) if user.profile.is_premium: - message = "Story is more than %s days old. Premium Archive accounts can mark any story as unread." % ( - settings.DAYS_OF_UNREAD) + message = ( + "Story is more than %s days old. Premium Archive accounts can mark any story as unread." + % (settings.DAYS_OF_UNREAD) + ) elif story.story_date > user.profile.unread_cutoff_premium: - message = "Story is older than %s days. Premium has %s days, and Premium Archive can mark anything unread." % ( - settings.DAYS_OF_UNREAD_FREE, settings.DAYS_OF_UNREAD) + message = ( + "Story is older than %s days. Premium has %s days, and Premium Archive can mark anything unread." 
+ % (settings.DAYS_OF_UNREAD_FREE, settings.DAYS_OF_UNREAD) + ) else: - message = "Story is more than %s days old, only Premium Archive can mark older stories unread." % ( - settings.DAYS_OF_UNREAD_FREE) + message = ( + "Story is more than %s days old, only Premium Archive can mark older stories unread." + % (settings.DAYS_OF_UNREAD_FREE) + ) return message - + @staticmethod def mark_unread(user_id, story_feed_id, story_hash, social_user_ids=None, r=None, username=None, ps=None): if not r: r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) story_hash = MStory.ensure_story_hash(story_hash, story_feed_id=story_feed_id) - - if not story_hash: return - + + if not story_hash: + return + def redis_commands(key): r.srem(key, story_hash) - r.expire(key, Feed.days_of_story_hashes_for_feed(story_feed_id)*24*60*60) + r.expire(key, Feed.days_of_story_hashes_for_feed(story_feed_id) * 24 * 60 * 60) - all_read_stories_key = 'RS:%s' % (user_id) + all_read_stories_key = "RS:%s" % (user_id) redis_commands(all_read_stories_key) - - read_story_key = 'RS:%s:%s' % (user_id, story_feed_id) + + read_story_key = "RS:%s:%s" % (user_id, story_feed_id) redis_commands(read_story_key) - - read_stories_list_key = 'lRS:%s' % user_id + + read_stories_list_key = "lRS:%s" % user_id r.lrem(read_stories_list_key, 1, story_hash) - + if ps and username: - ps.publish(username, 'story:unread:%s' % story_hash) - + ps.publish(username, "story:unread:%s" % story_hash) + if social_user_ids: for social_user_id in social_user_ids: - social_read_story_key = 'RS:%s:B:%s' % (user_id, social_user_id) + social_read_story_key = "RS:%s:B:%s" % (user_id, social_user_id) redis_commands(social_read_story_key) @staticmethod @@ -1343,51 +1532,52 @@ def get_stories(user_id, feed_id, r=None): r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) story_hashes = r.smembers("RS:%s:%s" % (user_id, feed_id)) return story_hashes - + @staticmethod def get_read_stories(user_id, offset=0, limit=12, order="newest"): r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) key = "lRS:%s" % user_id - + if order == "oldest": count = r.llen(key) - if offset >= count: return [] - offset = max(0, count - (offset+limit)) - story_hashes = r.lrange(key, offset, offset+limit) + if offset >= count: + return [] + offset = max(0, count - (offset + limit)) + story_hashes = r.lrange(key, offset, offset + limit) elif order == "newest": - story_hashes = r.lrange(key, offset, offset+limit) - + story_hashes = r.lrange(key, offset, offset + limit) + return story_hashes - + @classmethod def switch_feed(cls, user_id, old_feed_id, new_feed_id): r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) p = r.pipeline() - + story_hashes = UserSubscription.story_hashes(user_id, feed_ids=[old_feed_id]) # story_hashes = cls.get_stories(user_id, old_feed_id, r=r) - + for story_hash in story_hashes: _, hash_story = MStory.split_story_hash(story_hash) new_story_hash = "%s:%s" % (new_feed_id, hash_story) read_feed_key = "RS:%s:%s" % (user_id, new_feed_id) p.sadd(read_feed_key, new_story_hash) - p.expire(read_feed_key, Feed.days_of_story_hashes_for_feed(new_feed_id)*24*60*60) + p.expire(read_feed_key, Feed.days_of_story_hashes_for_feed(new_feed_id) * 24 * 60 * 60) read_user_key = "RS:%s" % (user_id) p.sadd(read_user_key, new_story_hash) - p.expire(read_user_key, Feed.days_of_story_hashes_for_feed(new_feed_id)*24*60*60) - + p.expire(read_user_key, Feed.days_of_story_hashes_for_feed(new_feed_id) * 24 * 60 * 60) + p.execute() - + if len(story_hashes) > 0: 
logging.info(" ---> %s read stories" % len(story_hashes)) - + @classmethod def switch_hash(cls, feed, old_hash, new_hash): r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) p = r.pipeline() - + usersubs = UserSubscription.objects.filter(feed_id=feed.pk, last_read_date__gte=feed.unread_cutoff) logging.info(" ---> ~SB%s usersubs~SN to switch read story hashes..." % len(usersubs)) for sub in usersubs: @@ -1395,14 +1585,14 @@ def switch_hash(cls, feed, old_hash, new_hash): read = r.sismember(rs_key, old_hash) if read: p.sadd(rs_key, new_hash) - p.expire(rs_key, feed.days_of_story_hashes*24*60*60) - + p.expire(rs_key, feed.days_of_story_hashes * 24 * 60 * 60) + read_user_key = "RS:%s" % sub.user.pk p.sadd(read_user_key, new_hash) - p.expire(read_user_key, feed.days_of_story_hashes*24*60*60) - + p.expire(read_user_key, feed.days_of_story_hashes * 24 * 60 * 60) + p.execute() - + @classmethod def read_story_count(cls, user_id): r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) @@ -1410,22 +1600,27 @@ def read_story_count(cls, user_id): count = r.scard(key) return count + class UserSubscriptionFolders(models.Model): """ A JSON list of folders and feeds for while a user has subscribed. The list is a recursive descent of feeds and folders in folders. Used to layout the feeds and folders in the Reader's feed navigation pane. """ + user = models.OneToOneField(User, on_delete=models.CASCADE) folders = models.TextField(default="[]") - + def __str__(self): - return "[%s]: %s" % (self.user, len(self.folders),) - + return "[%s]: %s" % ( + self.user, + len(self.folders), + ) + class Meta: verbose_name_plural = "folders" verbose_name = "folder" - + @classmethod def compact_for_user(cls, user_id): user = User.objects.get(pk=user_id) @@ -1433,12 +1628,12 @@ def compact_for_user(cls, user_id): usf = UserSubscriptionFolders.objects.get(user=user) except UserSubscriptionFolders.DoesNotExist: return - + usf.compact() - + def compact(self): folders = json.decode(self.folders) - + def _compact(folder): new_folder = [] for item in folder: @@ -1449,7 +1644,9 @@ def _compact(folder): # Check every existing folder at that level to see if it already exists for ef, existing_folder in enumerate(new_folder): if type(existing_folder) == dict and list(existing_folder.keys())[0] == f_k: - existing_folder_feed_ids = [f for f in list(existing_folder.values())[0] if type(f) == int] + existing_folder_feed_ids = [ + f for f in list(existing_folder.values())[0] if type(f) == int + ] merged = [] for merge_val in existing_folder_feed_ids: merged.append(merge_val) @@ -1460,19 +1657,23 @@ def _compact(folder): else: merged.append(merge_val) if f_v != existing_folder_feed_ids: - logging.info(f" ---> ~FRFound repeat folder: {f_k} \n\t" - f"~FBExisting: {f_v}\n\t" - f"~FCMerging: {list(existing_folder.values())[0]}\n\t" - f"~FYBecomes: {merged}") + logging.info( + f" ---> ~FRFound repeat folder: {f_k} \n\t" + f"~FBExisting: {f_v}\n\t" + f"~FCMerging: {list(existing_folder.values())[0]}\n\t" + f"~FYBecomes: {merged}" + ) new_folder[ef] = {f_k: _compact(merged)} else: - logging.info(f" ---> ~FRFound repeat folder ~FY{f_k}~FR, no difference in feeds") + logging.info( + f" ---> ~FRFound repeat folder ~FY{f_k}~FR, no difference in feeds" + ) break else: # If no match, then finally we can add the folder new_folder.append({f_k: _compact(f_v)}) return new_folder - + new_folders = _compact(folders) compact_msg = " ---> Compacting from %s to %s" % (folders, new_folders) new_folders = json.encode(new_folders) @@ -1481,7 
+1682,7 @@ def _compact(folder): logging.info(" ---> Compacting from %s bytes to %s bytes" % (len(self.folders), len(new_folders))) self.folders = new_folders self.save() - + def add_folder(self, parent_folder, folder): if self.folders: user_sub_folders = json.decode(self.folders) @@ -1491,9 +1692,10 @@ def add_folder(self, parent_folder, folder): user_sub_folders = add_object_to_folder(obj, parent_folder, user_sub_folders) self.folders = json.encode(user_sub_folders) self.save() - + def arranged_folders(self): user_sub_folders = json.decode(self.folders) + def _arrange_folder(folder): folder_feeds = [] folder_folders = [] @@ -1507,22 +1709,20 @@ def _arrange_folder(folder): arranged_folder = folder_feeds + folder_folders return arranged_folder - + return _arrange_folder(user_sub_folders) - + def flatten_folders(self, feeds=None, inactive_feeds=None): folders = json.decode(self.folders) flat_folders = {" ": []} if feeds and not inactive_feeds: inactive_feeds = [] - + def _flatten_folders(items, parent_folder="", depth=0): for item in items: - if (isinstance(item, int) and - (not feeds or - (item in feeds or item in inactive_feeds))): + if isinstance(item, int) and (not feeds or (item in feeds or item in inactive_feeds)): if not parent_folder: - parent_folder = ' ' + parent_folder = " " if parent_folder in flat_folders: flat_folders[parent_folder].append(item) else: @@ -1531,42 +1731,49 @@ def _flatten_folders(items, parent_folder="", depth=0): for folder_name in item: folder = item[folder_name] flat_folder_name = "%s%s%s" % ( - parent_folder if parent_folder and parent_folder != ' ' else "", - " - " if parent_folder and parent_folder != ' ' else "", - folder_name + parent_folder if parent_folder and parent_folder != " " else "", + " - " if parent_folder and parent_folder != " " else "", + folder_name, ) flat_folders[flat_folder_name] = [] - _flatten_folders(folder, flat_folder_name, depth+1) - + _flatten_folders(folder, flat_folder_name, depth + 1) + _flatten_folders(folders) - + return flat_folders def delete_feed(self, feed_id, in_folder, commit_delete=True): feed_id = int(feed_id) - def _find_feed_in_folders(old_folders, folder_name='', multiples_found=False, deleted=False): + + def _find_feed_in_folders(old_folders, folder_name="", multiples_found=False, deleted=False): new_folders = [] for k, folder in enumerate(old_folders): if isinstance(folder, int): - if (folder == feed_id and in_folder is not None and ( - (in_folder not in folder_name) or - (in_folder in folder_name and deleted))): + if ( + folder == feed_id + and in_folder is not None + and ((in_folder not in folder_name) or (in_folder in folder_name and deleted)) + ): multiples_found = True - logging.user(self.user, "~FB~SBDeleting feed, and a multiple has been found in '%s' / '%s' %s" % (folder_name, in_folder, '(deleted)' if deleted else '')) - if (folder == feed_id and - (in_folder is None or in_folder in folder_name) and - not deleted): - logging.user(self.user, "~FBDelete feed: %s'th item: %s folders/feeds" % ( - k, len(old_folders) - )) + logging.user( + self.user, + "~FB~SBDeleting feed, and a multiple has been found in '%s' / '%s' %s" + % (folder_name, in_folder, "(deleted)" if deleted else ""), + ) + if folder == feed_id and (in_folder is None or in_folder in folder_name) and not deleted: + logging.user( + self.user, "~FBDelete feed: %s'th item: %s folders/feeds" % (k, len(old_folders)) + ) deleted = True else: new_folders.append(folder) elif isinstance(folder, dict): for f_k, f_v in list(folder.items()): - nf, 
multiples_found, deleted = _find_feed_in_folders(f_v, f_k, multiples_found, deleted) + nf, multiples_found, deleted = _find_feed_in_folders( + f_v, f_k, multiples_found, deleted + ) new_folders.append({f_k: nf}) - + return new_folders, multiples_found, deleted user_sub_folders = self.arranged_folders() @@ -1582,8 +1789,7 @@ def _find_feed_in_folders(old_folders, folder_name='', multiples_found=False, de duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id) if duplicate_feed: try: - user_sub = UserSubscription.objects.get(user=self.user, - feed=duplicate_feed[0].feed) + user_sub = UserSubscription.objects.get(user=self.user, feed=duplicate_feed[0].feed) except (Feed.DoesNotExist, UserSubscription.DoesNotExist): return if user_sub: @@ -1600,30 +1806,38 @@ def _find_folder_in_folders(old_folders, folder_name, feeds_to_delete, deleted_f elif isinstance(folder, dict): for f_k, f_v in list(folder.items()): if f_k == folder_to_delete and (in_folder in folder_name or in_folder is None): - logging.user(self.user, "~FBDeleting folder '~SB%s~SN' in '%s': %s" % (f_k, folder_name, folder)) + logging.user( + self.user, + "~FBDeleting folder '~SB%s~SN' in '%s': %s" % (f_k, folder_name, folder), + ) deleted_folder = folder else: - nf, feeds_to_delete, deleted_folder = _find_folder_in_folders(f_v, f_k, feeds_to_delete, deleted_folder) + nf, feeds_to_delete, deleted_folder = _find_folder_in_folders( + f_v, f_k, feeds_to_delete, deleted_folder + ) new_folders.append({f_k: nf}) - + return new_folders, feeds_to_delete, deleted_folder - + user_sub_folders = json.decode(self.folders) - user_sub_folders, feeds_to_delete, deleted_folder = _find_folder_in_folders(user_sub_folders, '', feed_ids_in_folder) + user_sub_folders, feeds_to_delete, deleted_folder = _find_folder_in_folders( + user_sub_folders, "", feed_ids_in_folder + ) self.folders = json.encode(user_sub_folders) self.save() if commit_delete: UserSubscription.objects.filter(user=self.user, feed__in=feeds_to_delete).delete() - + return deleted_folder def delete_feeds_by_folder(self, feeds_by_folder): - logging.user(self.user, "~FBDeleting ~FR~SB%s~SN feeds~FB: ~SB%s" % ( - len(feeds_by_folder), feeds_by_folder)) + logging.user( + self.user, "~FBDeleting ~FR~SB%s~SN feeds~FB: ~SB%s" % (len(feeds_by_folder), feeds_by_folder) + ) for feed_id, in_folder in feeds_by_folder: self.delete_feed(feed_id, in_folder) - + return self def rename_folder(self, folder_to_rename, new_folder_name, in_folder): @@ -1636,21 +1850,25 @@ def _find_folder_in_folders(old_folders, folder_name): for f_k, f_v in list(folder.items()): nf = _find_folder_in_folders(f_v, f_k) if f_k == folder_to_rename and in_folder in folder_name: - logging.user(self.user, "~FBRenaming folder '~SB%s~SN' in '%s' to: ~SB%s" % ( - f_k, folder_name, new_folder_name)) + logging.user( + self.user, + "~FBRenaming folder '~SB%s~SN' in '%s' to: ~SB%s" + % (f_k, folder_name, new_folder_name), + ) f_k = new_folder_name new_folders.append({f_k: nf}) - + return new_folders - + user_sub_folders = json.decode(self.folders) - user_sub_folders = _find_folder_in_folders(user_sub_folders, '') + user_sub_folders = _find_folder_in_folders(user_sub_folders, "") self.folders = json.encode(user_sub_folders) self.save() - + def move_feed_to_folders(self, feed_id, in_folders=None, to_folders=None): - logging.user(self.user, "~FBMoving feed '~SB%s~SN' in '%s' to: ~SB%s" % ( - feed_id, in_folders, to_folders)) + logging.user( + self.user, "~FBMoving feed '~SB%s~SN' in '%s' to: ~SB%s" % (feed_id, in_folders, 
to_folders) + ) user_sub_folders = json.decode(self.folders) for in_folder in in_folders: self.delete_feed(feed_id, in_folder, commit_delete=False) @@ -1659,46 +1877,49 @@ def move_feed_to_folders(self, feed_id, in_folders=None, to_folders=None): user_sub_folders = add_object_to_folder(int(feed_id), to_folder, user_sub_folders) self.folders = json.encode(user_sub_folders) self.save() - + return self def move_feed_to_folder(self, feed_id, in_folder=None, to_folder=None): - logging.user(self.user, "~FBMoving feed '~SB%s~SN' in '%s' to: ~SB%s" % ( - feed_id, in_folder, to_folder)) + logging.user( + self.user, "~FBMoving feed '~SB%s~SN' in '%s' to: ~SB%s" % (feed_id, in_folder, to_folder) + ) user_sub_folders = json.decode(self.folders) self.delete_feed(feed_id, in_folder, commit_delete=False) user_sub_folders = json.decode(self.folders) user_sub_folders = add_object_to_folder(int(feed_id), to_folder, user_sub_folders) self.folders = json.encode(user_sub_folders) self.save() - + return self def move_folder_to_folder(self, folder_name, in_folder=None, to_folder=None): - logging.user(self.user, "~FBMoving folder '~SB%s~SN' in '%s' to: ~SB%s" % ( - folder_name, in_folder, to_folder)) + logging.user( + self.user, "~FBMoving folder '~SB%s~SN' in '%s' to: ~SB%s" % (folder_name, in_folder, to_folder) + ) user_sub_folders = json.decode(self.folders) deleted_folder = self.delete_folder(folder_name, in_folder, [], commit_delete=False) user_sub_folders = json.decode(self.folders) user_sub_folders = add_object_to_folder(deleted_folder, to_folder, user_sub_folders) self.folders = json.encode(user_sub_folders) self.save() - + return self - + def move_feeds_by_folder_to_folder(self, feeds_by_folder, to_folder): - logging.user(self.user, "~FBMoving ~SB%s~SN feeds to folder: ~SB%s" % ( - len(feeds_by_folder), to_folder)) + logging.user( + self.user, "~FBMoving ~SB%s~SN feeds to folder: ~SB%s" % (len(feeds_by_folder), to_folder) + ) for feed_id, in_folder in feeds_by_folder: feed_id = int(feed_id) self.move_feed_to_folder(feed_id, in_folder, to_folder) - + return self - + def rewrite_feed(self, original_feed, duplicate_feed): def rewrite_folders(folders, original_feed, duplicate_feed): new_folders = [] - + for k, folder in enumerate(folders): if isinstance(folder, int): if folder == duplicate_feed.pk: @@ -1711,15 +1932,15 @@ def rewrite_folders(folders, original_feed, duplicate_feed): new_folders.append({f_k: rewrite_folders(f_v, original_feed, duplicate_feed)}) return new_folders - + folders = json.decode(self.folders) folders = rewrite_folders(folders, original_feed, duplicate_feed) self.folders = json.encode(folders) self.save() - + def flat(self): folders = json.decode(self.folders) - + def _flat(folder, feeds=None): if not feeds: feeds = [] @@ -1732,10 +1953,10 @@ def _flat(folder, feeds=None): return feeds return _flat(folders) - + def feed_ids_under_folder_slug(self, slug): folders = json.decode(self.folders) - + def _feeds(folder, found=False, folder_title=None): feeds = [] local_found = False @@ -1756,16 +1977,16 @@ def _feeds(folder, found=False, folder_title=None): return feeds, folder_title return _feeds(folders) - + @classmethod def add_all_missing_feeds(cls): - usf = cls.objects.all().order_by('pk') + usf = cls.objects.all().order_by("pk") total = usf.count() - + for i, f in enumerate(usf): print("%s/%s: %s" % (i, total, f)) f.add_missing_feeds() - + @classmethod def add_missing_feeds_for_user(cls, user_id): user = User.objects.get(pk=user_id) @@ -1773,62 +1994,67 @@ def 
add_missing_feeds_for_user(cls, user_id): usf = UserSubscriptionFolders.objects.get(user=user) except UserSubscriptionFolders.DoesNotExist: return - + usf.add_missing_feeds() - + def add_missing_feeds(self): all_feeds = self.flat() - subs = [us.feed_id for us in - UserSubscription.objects.filter(user=self.user).only('feed')] - + subs = [us.feed_id for us in UserSubscription.objects.filter(user=self.user).only("feed")] + missing_subs = set(all_feeds) - set(subs) if missing_subs: - logging.debug(" ---> %s is missing %s subs. Adding %s..." % ( - self.user, len(missing_subs), missing_subs)) + logging.debug( + " ---> %s is missing %s subs. Adding %s..." % (self.user, len(missing_subs), missing_subs) + ) for feed_id in missing_subs: feed = Feed.get_by_id(feed_id) if feed: if feed_id != feed.pk: - logging.debug(" ---> %s doesn't match %s, rewriting to remove %s..." % ( - feed_id, feed.pk, feed_id)) + logging.debug( + " ---> %s doesn't match %s, rewriting to remove %s..." + % (feed_id, feed.pk, feed_id) + ) # Clear out duplicate sub in folders before subscribing to feed duplicate_feed = Feed.get_by_id(feed_id) duplicate_feed.pk = feed_id self.rewrite_feed(feed, duplicate_feed) - us, _ = UserSubscription.objects.get_or_create(user=self.user, feed=feed, defaults={ - 'needs_unread_recalc': True - }) + us, _ = UserSubscription.objects.get_or_create( + user=self.user, feed=feed, defaults={"needs_unread_recalc": True} + ) if not us.needs_unread_recalc: us.needs_unread_recalc = True us.save() elif feed_id and not feed: # No feed found for subscription, remove subscription - logging.debug(" ---> %s: No feed found, removing subscription: %s" % ( - self.user, feed_id)) + logging.debug(" ---> %s: No feed found, removing subscription: %s" % (self.user, feed_id)) self.delete_feed(feed_id, None, commit_delete=False) - missing_folder_feeds = set(subs) - set(all_feeds) if missing_folder_feeds: user_sub_folders = json.decode(self.folders) - logging.debug(" ---> %s is missing %s folder feeds. Adding %s..." % ( - self.user, len(missing_folder_feeds), missing_folder_feeds)) + logging.debug( + " ---> %s is missing %s folder feeds. Adding %s..." + % (self.user, len(missing_folder_feeds), missing_folder_feeds) + ) for feed_id in missing_folder_feeds: feed = Feed.get_by_id(feed_id) if feed and feed.pk == feed_id: user_sub_folders = add_object_to_folder(feed_id, "", user_sub_folders) self.folders = json.encode(user_sub_folders) self.save() - + def auto_activate(self): - if self.user.profile.is_premium: return - + if self.user.profile.is_premium: + return + active_count = UserSubscription.objects.filter(user=self.user, active=True).count() - if active_count: return - + if active_count: + return + all_feeds = self.flat() - if not all_feeds: return - + if not all_feeds: + return + for feed in all_feeds[:64]: try: sub = UserSubscription.objects.get(user=self.user, feed=feed) @@ -1844,20 +2070,22 @@ class Feature(models.Model): """ Simple blog-like feature board shown to all users on the home page. """ + description = models.TextField(default="") date = models.DateTimeField(default=datetime.datetime.now) - + def __str__(self): return "[%s] %s" % (self.date, self.description[:50]) - + class Meta: ordering = ["-date"] + class RUserUnreadStory: - """Model to store manually unread stories that are older than a user's unread_cutoff + """Model to store manually unread stories that are older than a user's unread_cutoff (same as days_of_unread). This is built for Premium Archive purposes. 
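    A minimal usage sketch (user_id, story_hash, and story_date below are hypothetical
    placeholders, not values from the codebase):

        RUserUnreadStory.mark_unread(user_id, story_hash, story_date)
        RUserUnreadStory.unreads(user_id, story_hash)      # fetch the archived unreads
        RUserUnreadStory.mark_read(user_id, [story_hash])  # clear them again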
- If a story is marked as unread but is within the unread_cutoff, no need to add a + If a story is marked as unread but is within the unread_cutoff, no need to add a UserUnreadStory instance as it will be automatically marked as read according to the user's days_of_unread preference. """ @@ -1884,7 +2112,7 @@ def mark_read(cls, user_id, story_hashes, r=None): story_hashes = [story_hashes] if not r: r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) - + pipeline = r.pipeline() for story_hash in story_hashes: feed_id, _ = MStory.split_story_hash(story_hash) @@ -1895,7 +2123,7 @@ def mark_read(cls, user_id, story_hashes, r=None): pipeline.zrem(user_manual_unread_stories_key, story_hash) pipeline.zrem(user_manual_unread_stories_feed_key, story_hash) pipeline.execute() - + @classmethod def unreads(cls, user_id, story_hash): if not isinstance(story_hash, list): @@ -1920,16 +2148,15 @@ def switch_feed(cls, user_id, old_feed_id, new_feed_id): r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) p = r.pipeline() story_hashes = cls.get_stories_and_dates(user_id, old_feed_id, r=r) - - for (story_hash, story_timestamp) in story_hashes: + + for story_hash, story_timestamp in story_hashes: _, hash_story = MStory.split_story_hash(story_hash) new_story_hash = "%s:%s" % (new_feed_id, hash_story) # read_feed_key = "RS:%s:%s" % (user_id, new_feed_id) # user_manual_unread_stories_feed_key = f"uU:{user_id}:{new_feed_id}" cls.mark_unread(user_id, new_story_hash, story_timestamp, r=p) - + p.execute() - + if len(story_hashes) > 0: logging.info(" ---> %s archived unread stories" % len(story_hashes)) - diff --git a/apps/reader/tasks.py b/apps/reader/tasks.py index 0294bdf8d3..179500b6b3 100644 --- a/apps/reader/tasks.py +++ b/apps/reader/tasks.py @@ -6,13 +6,14 @@ from apps.reader.models import UserSubscription from apps.social.models import MSocialSubscription -@app.task(name='freshen-homepage') + +@app.task(name="freshen-homepage") def FreshenHomepage(): day_ago = datetime.datetime.utcnow() - datetime.timedelta(days=1) user = User.objects.get(username=settings.HOMEPAGE_USERNAME) user.profile.last_seen_on = datetime.datetime.utcnow() user.profile.save() - + usersubs = UserSubscription.objects.filter(user=user) logging.debug(" ---> %s has %s feeds, freshening..." % (user.username, usersubs.count())) for sub in usersubs: @@ -20,7 +21,7 @@ def FreshenHomepage(): sub.needs_unread_recalc = True sub.save() sub.calculate_feed_scores(silent=True) - + socialsubs = MSocialSubscription.objects.filter(user_id=user.pk) logging.debug(" ---> %s has %s socialsubs, freshening..." % (user.username, socialsubs.count())) for sub in socialsubs: @@ -29,12 +30,16 @@ def FreshenHomepage(): sub.save() sub.calculate_feed_scores(silent=True) -@app.task(name='clean-analytics', time_limit=720*10) + +@app.task(name="clean-analytics", time_limit=720 * 10) def CleanAnalytics(): - logging.debug(" ---> Cleaning analytics... %s feed fetches" % ( - settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.count(), - )) + logging.debug( + " ---> Cleaning analytics... 
%s feed fetches" + % (settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.count(),) + ) day_ago = datetime.datetime.utcnow() - datetime.timedelta(days=1) - settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.delete_many({ - "date": {"$lt": day_ago}, - }) + settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.delete_many( + { + "date": {"$lt": day_ago}, + } + ) diff --git a/apps/reader/test_reader.py b/apps/reader/test_reader.py index 7bde8b38d4..cd01cc84fb 100644 --- a/apps/reader/test_reader.py +++ b/apps/reader/test_reader.py @@ -5,129 +5,193 @@ from django.conf import settings from mongoengine.connection import connect, disconnect + class Test_Reader(TestCase): fixtures = [ - 'apps/rss_feeds/fixtures/initial_data.json', - 'apps/rss_feeds/fixtures/rss_feeds.json', - 'subscriptions.json', #'stories.json', - 'apps/rss_feeds/fixtures/gawker1.json'] - + "apps/rss_feeds/fixtures/initial_data.json", + "apps/rss_feeds/fixtures/rss_feeds.json", + "subscriptions.json", #'stories.json', + "apps/rss_feeds/fixtures/gawker1.json", + ] + def setUp(self): disconnect() - settings.MONGODB = connect('test_newsblur') + settings.MONGODB = connect("test_newsblur") self.client = Client() def tearDown(self): - settings.MONGODB.drop_database('test_newsblur') - + settings.MONGODB.drop_database("test_newsblur") + def test_api_feeds(self): - self.client.login(username='conesus', password='test') - - response = self.client.get(reverse('load-feeds')) + self.client.login(username="conesus", password="test") + + response = self.client.get(reverse("load-feeds")) content = json.decode(response.content) - self.assertEqual(len(content['feeds']), 10) - self.assertEqual(content['feeds']['1']['feed_title'], 'The NewsBlur Blog') - self.assertEqual(content['folders'], [{'Tech': [1, 4, 5, {'Deep Tech': [6, 7]}]}, 2, 3, 8, 9, {'Blogs': [8, 9]}, 1]) - + self.assertEqual(len(content["feeds"]), 10) + self.assertEqual(content["feeds"]["1"]["feed_title"], "The NewsBlur Blog") + self.assertEqual( + content["folders"], [{"Tech": [1, 4, 5, {"Deep Tech": [6, 7]}]}, 2, 3, 8, 9, {"Blogs": [8, 9]}, 1] + ) + def test_delete_feed(self): - self.client.login(username='conesus', password='test') - response = self.client.get(reverse('load-feeds')) + self.client.login(username="conesus", password="test") + response = self.client.get(reverse("load-feeds")) feeds = json.decode(response.content) - self.assertEqual(feeds['folders'], [{'Tech': [1, 4, 5, {'Deep Tech': [6, 7]}]}, 2, 3, 8, 9, {'Blogs': [8, 9]}, 1]) - + self.assertEqual( + feeds["folders"], [{"Tech": [1, 4, 5, {"Deep Tech": [6, 7]}]}, 2, 3, 8, 9, {"Blogs": [8, 9]}, 1] + ) + # Delete feed - response = self.client.post(reverse('delete-feed'), {'feed_id': 1, 'in_folder': ''}) + response = self.client.post(reverse("delete-feed"), {"feed_id": 1, "in_folder": ""}) response = json.decode(response.content) - self.assertEqual(response['code'], 1) - - response = self.client.get(reverse('load-feeds')) + self.assertEqual(response["code"], 1) + + response = self.client.get(reverse("load-feeds")) feeds = json.decode(response.content) - self.assertEqual(feeds['folders'], [2, 3, 8, 9, {'Tech': [1, 4, 5, {'Deep Tech': [6, 7]}]}, {'Blogs': [8, 9]}]) - + self.assertEqual( + feeds["folders"], [2, 3, 8, 9, {"Tech": [1, 4, 5, {"Deep Tech": [6, 7]}]}, {"Blogs": [8, 9]}] + ) + # Delete feed - response = self.client.post(reverse('delete-feed'), {'feed_id': 9, 'in_folder': 'Blogs'}) + response = self.client.post(reverse("delete-feed"), {"feed_id": 9, "in_folder": "Blogs"}) response = json.decode(response.content) - 
self.assertEqual(response['code'], 1) - - response = self.client.get(reverse('load-feeds')) + self.assertEqual(response["code"], 1) + + response = self.client.get(reverse("load-feeds")) feeds = json.decode(response.content) - self.assertEqual(feeds['folders'], [2, 3, 8, 9, {'Tech': [1, 4, 5, {'Deep Tech': [6, 7]}]}, {'Blogs': [8]}]) - + self.assertEqual( + feeds["folders"], [2, 3, 8, 9, {"Tech": [1, 4, 5, {"Deep Tech": [6, 7]}]}, {"Blogs": [8]}] + ) + # Delete feed - response = self.client.post(reverse('delete-feed'), {'feed_id': 5, 'in_folder': 'Tech'}) + response = self.client.post(reverse("delete-feed"), {"feed_id": 5, "in_folder": "Tech"}) response = json.decode(response.content) - self.assertEqual(response['code'], 1) - - response = self.client.get(reverse('load-feeds')) + self.assertEqual(response["code"], 1) + + response = self.client.get(reverse("load-feeds")) feeds = json.decode(response.content) - self.assertEqual(feeds['folders'], [2, 3, 8, 9, {'Tech': [1, 4, {'Deep Tech': [6, 7]}]}, {'Blogs': [8]}]) - + self.assertEqual( + feeds["folders"], [2, 3, 8, 9, {"Tech": [1, 4, {"Deep Tech": [6, 7]}]}, {"Blogs": [8]}] + ) + # Delete feed - response = self.client.post(reverse('delete-feed'), {'feed_id': 4, 'in_folder': 'Tech'}) + response = self.client.post(reverse("delete-feed"), {"feed_id": 4, "in_folder": "Tech"}) response = json.decode(response.content) - self.assertEqual(response['code'], 1) - - response = self.client.get(reverse('load-feeds')) + self.assertEqual(response["code"], 1) + + response = self.client.get(reverse("load-feeds")) feeds = json.decode(response.content) - self.assertEqual(feeds['folders'], [2, 3, 8, 9, {'Tech': [1, {'Deep Tech': [6, 7]}]}, {'Blogs': [8]}]) - + self.assertEqual(feeds["folders"], [2, 3, 8, 9, {"Tech": [1, {"Deep Tech": [6, 7]}]}, {"Blogs": [8]}]) + # Delete feed - response = self.client.post(reverse('delete-feed'), {'feed_id': 8, 'in_folder': ''}) + response = self.client.post(reverse("delete-feed"), {"feed_id": 8, "in_folder": ""}) response = json.decode(response.content) - self.assertEqual(response['code'], 1) - - response = self.client.get(reverse('load-feeds')) + self.assertEqual(response["code"], 1) + + response = self.client.get(reverse("load-feeds")) feeds = json.decode(response.content) - self.assertEqual(feeds['folders'], [2, 3, 9, {'Tech': [1, {'Deep Tech': [6, 7]}]}, {'Blogs': [8]}]) + self.assertEqual(feeds["folders"], [2, 3, 9, {"Tech": [1, {"Deep Tech": [6, 7]}]}, {"Blogs": [8]}]) def test_delete_feed__multiple_folders(self): - self.client.login(username='conesus', password='test') - - response = self.client.get(reverse('load-feeds')) + self.client.login(username="conesus", password="test") + + response = self.client.get(reverse("load-feeds")) feeds = json.decode(response.content) - self.assertEqual(feeds['folders'], [{'Tech': [1, 4, 5, {'Deep Tech': [6, 7]}]}, 2, 3, 8, 9, {'Blogs': [8, 9]}, 1]) - + self.assertEqual( + feeds["folders"], [{"Tech": [1, 4, 5, {"Deep Tech": [6, 7]}]}, 2, 3, 8, 9, {"Blogs": [8, 9]}, 1] + ) + # Delete feed - response = self.client.post(reverse('delete-feed'), {'feed_id': 1}) + response = self.client.post(reverse("delete-feed"), {"feed_id": 1}) response = json.decode(response.content) - self.assertEqual(response['code'], 1) - - response = self.client.get(reverse('load-feeds')) + self.assertEqual(response["code"], 1) + + response = self.client.get(reverse("load-feeds")) feeds = json.decode(response.content) - self.assertEqual(feeds['folders'], [2, 3, 8, 9, {'Tech': [1, 4, 5, {'Deep Tech': [6, 7]}]}, 
{'Blogs': [8, 9]}]) - + self.assertEqual( + feeds["folders"], [2, 3, 8, 9, {"Tech": [1, 4, 5, {"Deep Tech": [6, 7]}]}, {"Blogs": [8, 9]}] + ) + def test_move_feeds_by_folder(self): - self.client.login(username='Dejal', password='test') + self.client.login(username="Dejal", password="test") - response = self.client.get(reverse('load-feeds')) + response = self.client.get(reverse("load-feeds")) feeds = json.decode(response.content) - self.assertEqual(feeds['folders'], [5299728, 644144, 1187026, {"Brainiacs & Opinion": [569, 38, 3581, 183139, 1186180, 15]}, {"Science & Technology": [731503, 140145, 1272495, 76, 161, 39, {"Hacker": [5985150, 3323431]}]}, {"Humor": [212379, 3530, 5994357]}, {"Videos": [3240, 5168]}]) - + self.assertEqual( + feeds["folders"], + [ + 5299728, + 644144, + 1187026, + {"Brainiacs & Opinion": [569, 38, 3581, 183139, 1186180, 15]}, + { + "Science & Technology": [ + 731503, + 140145, + 1272495, + 76, + 161, + 39, + {"Hacker": [5985150, 3323431]}, + ] + }, + {"Humor": [212379, 3530, 5994357]}, + {"Videos": [3240, 5168]}, + ], + ) + # Move feeds by folder - response = self.client.post(reverse('move-feeds-by-folder-to-folder'), {'feeds_by_folder': '[\n [\n "5994357",\n "Humor"\n ],\n [\n "3530",\n "Humor"\n ]\n]', 'to_folder': 'Brainiacs & Opinion'}) + response = self.client.post( + reverse("move-feeds-by-folder-to-folder"), + { + "feeds_by_folder": '[\n [\n "5994357",\n "Humor"\n ],\n [\n "3530",\n "Humor"\n ]\n]', + "to_folder": "Brainiacs & Opinion", + }, + ) response = json.decode(response.content) - self.assertEqual(response['code'], 1) - - response = self.client.get(reverse('load-feeds')) + self.assertEqual(response["code"], 1) + + response = self.client.get(reverse("load-feeds")) feeds = json.decode(response.content) - self.assertEqual(feeds['folders'], [5299728, 644144, 1187026, {"Brainiacs & Opinion": [569, 38, 3581, 183139, 1186180, 15, 5994357, 3530]}, {"Science & Technology": [731503, 140145, 1272495, 76, 161, 39, {"Hacker": [5985150, 3323431]}]}, {"Humor": [212379]}, {"Videos": [3240, 5168]}]) - + self.assertEqual( + feeds["folders"], + [ + 5299728, + 644144, + 1187026, + {"Brainiacs & Opinion": [569, 38, 3581, 183139, 1186180, 15, 5994357, 3530]}, + { + "Science & Technology": [ + 731503, + 140145, + 1272495, + 76, + 161, + 39, + {"Hacker": [5985150, 3323431]}, + ] + }, + {"Humor": [212379]}, + {"Videos": [3240, 5168]}, + ], + ) + def test_load_single_feed(self): # from django.conf import settings # from django.db import connection # settings.DEBUG = True # connection.queries = [] - self.client.login(username='conesus', password='test') - url = reverse('load-single-feed', kwargs=dict(feed_id=1)) + self.client.login(username="conesus", password="test") + url = reverse("load-single-feed", kwargs=dict(feed_id=1)) response = self.client.get(url) feed = json.decode(response.content) - self.assertEqual(len(feed['feed_tags']), 0) - self.assertEqual(len(feed['classifiers']['tags']), 0) + self.assertEqual(len(feed["feed_tags"]), 0) + self.assertEqual(len(feed["classifiers"]["tags"]), 0) # self.assert_(connection.queries) - + # settings.DEBUG = False - + def test_compact_user_subscription_folders(self): usf = UserSubscriptionFolders.objects.get(user=User.objects.all()[0]) usf.folders = '[2, 3, {"Bloglets": [423, 424, 425]}, {"Blogs": [426, 427, 428, 429, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439, 440, 441, 442, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, {"People": [471, 472, 473, 474, 
475, 476, 477, 478, 479, 480, 481, 482, 483, 484, 485, 486, 487, 488, 489, 490, 491, 492, 493, 494, 495, 496, 497, 498, 499, 500, 501, 502, 503, 504, 505, 506, 507, 508, 509, 510, 511, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525, 526, 527, 528, 867, 946, 947, 948]}, {"Tumblrs": [529, 530, 531, 532, 533, 534, 535, 536, 537, 538, 539, 540, 541, 542, 543, 544, 545, 546, 547, 548, 549]}, {"Photo Blogs": [550, 551, 552, 553, 554, 555, 556]}, {"Travel": [557, 558, 559]}, {"People": [471, 472, 473, 474, 475, 476, 477, 478, 479, 480, 481, 482, 483, 484, 485, 486, 487, 488, 489, 490, 491, 492, 493, 494, 495, 496, 497, 498, 499, 500, 501, 502, 503, 504, 505, 506, 508, 509, 510, 511, 512, 513, 514, 515, 516, 517, 518, 519, 522, 523, 524, 525, 526, 527, 528, 507, 520, 867]}, {"Tumblrs": [529, 530, 531, 532, 533, 534, 535, 536, 537, 538, 539, 540, 541, 542, 543, 544, 545, 546, 547, 548, 549]}, {"Photo Blogs": [550, 551, 552, 553, 554, 555, 556]}, {"Travel": [558, 559, 557]}, 943, {"Link Blogs": [467, 468, 469, 470]}, {"People": [471, 472, 473, 474, 475, 476, 477, 478, 479, 480, 481, 482, 483, 484, 485, 486, 487, 488, 490, 491, 492, 493, 494, 495, 496, 497, 498, 499, 500, 501, 502, 504, 505, 506, 508, 509, 510, 511, 512, 513, 514, 515, 516, 517, 518, 519, 522, 523, 525, 526, 527, 528]}, {"Tumblrs": [529, 530, 531, 532, 533, 534, 535, 536, 537, 538, 539, 540, 541, 542, 543, 544, 545, 546, 547, 548, 549]}, {"Photo Blogs": [550, 551, 552, 553, 554, 555, 556]}, {"Travel": [558, 559]}]}, {"Code": [560, 561, 562, 563, 564, 565, 566, 567, 568, 569, 570, 571, 572, 573, 574, 575, 576, 577, 578, 579, 580, 581, 582, 583]}, {"Cooking": [584, 585, 586, 587, 588, 589, 590, 591, 592, 593, 594, 595, 596, 597, 873, 953]}, {"Meta": [598, 599, 600, 601, 602, 603, 604, 605, 606, 607, 608]}, {"New York": [609, 610, 611, 612, 613, 614]}, {"San Francisco": [615, 616, 617, 618, 619, 620, 621, 622, 623, 624, 625, 626, 627, 628, 629, 630, 631, 632, 633, 634, 875]}, {"Tech": [635, 636, 637, 638, 639, 640, 641, 642, 643, 644, 645, 646, 647, 648, 649, 650, 651, 652, 653, 654, 655, 656, 657, 658, 659, 660, 184, 661, 662, 663, 664, 665, 666]}, {"Comics & Cartoons": [667, 668, 669, 670, 671, 672, 673, 63, 674, 675, 676, 677, 678, 679, 680, 681, 682, 109, 683, 684, 685, 958]}, {"Hardware": [686, 687, 688, 689, 690, 691, 692]}, {"Wood": []}, {"Newsletters": [693, 694, 695, 696, 697, 698, 699, 700, 701, 702, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 724, 719, 720, 721, 722, 723, 725, 727, 728, 729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 739, 740, 741, 742, 743, 744, 745, 746, 747, 748, 749, 750, 751, 752, 753, 754, 755, 756, 757, 758, 759, 760, 761, 762, 763, 764, 765, 766, 767, 768, 769, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779, 780, 781, 782, 783, 895]}, {"Woodworking": [784, 785, 786, 787, 788, 789, 790, 791, 792, 793]}, {"Twitter": [794, 795, 796, 797, 798, 799, 800, 801, 802, 803, 804, 805, 806, 807, 838, 915]}, {"News": [808, 809, 810, 811, 812, 813, 814, 815, 816, 817]}, {"Home": [818, 819, 820, 821, 822, 823]}, {"Facebook": [824, 825, 826]}, {"Art": [827, 828]}, {"Science": [403, 404, 405, 401, 402]}, {"Boston": [829, 830]}, {"mobility": [831, 832, 833, 834, 835, 836, 837, 963]}, {"Biking": []}, {"A Muted Folder": [1]}, 1, {"Any Broken Feeds": [916]}, {"Any Broken Feeds, Although Some of These Work Fine": [4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 840, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 841, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 
44, 45, 46, 47, 48, 842, 50, 51, 52, 53, 54, 843, 56, 57, 58, 59, 60, 61, 62, 63, 844, 917, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 918, 130, 131, 132, 846, 134, 135, 136, 919, 138, 139, 140, 141, 142, 143, 144, 145, 847, 147, 848, 149, 150, 151, 152, 153, 154, 849, 156, 157, 158, 936, 160, 850, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 1, 185, 186, 187, 188, 189, 851, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 852, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 853, 243, 854, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 856, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 939, 281, 282, 283, 284, 285, 940, 287, 288, 289, 857, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 341, 342, 343, 344, 345, 346, 347, 348, 349, 350, 351, 352, 858, 354, 355, 859, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 860, 376, 377, 378, 379, 380, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, {"Ubuntu": [396, 397, 398, 399, 400]}, {"Science": [401, 402, 403, 404, 405]}, {"Music": [406, 407, 408, 409, 410, 411, 412]}, {"NYTimes": [413]}, {"Test": [414]}, {"Organizer": [415, 416, 417]}, {"Adult": [418, 419, 861, 421]}, {"Test": []}, 422]}]' @@ -137,7 +201,7 @@ def test_compact_user_subscription_folders(self): compact_folders = usf.folders self.assertNotEquals(dupe_folders, compact_folders) - + def test_compact_user_subscription_folders2(self): usf = UserSubscriptionFolders.objects.get(user=User.objects.all()[0]) usf.folders = '[2, 3, {"Bloglets": [423, 424, 425]}, {"Blogs": [426, 427, 428, 429, 430, {"Photo Blogs": [550, 551, 552, 553, 554, 555, 556]}, {"Photo Blogs": [551, 552, 553, 554, 555, 556]}, {"Travel": [557, 558]}, {"Travel": [557, 559]}, 943, {"Link Blogs": [467, 468, 469, 470, {"Travel": [557, 558]}, {"Travel": [557, 559]}]}, {"Link Blogs": [467, 468, 469, 470, {"Travel": [557, 558]}, {"Travel": [557, 559, 558]}]}]}]' diff --git a/apps/reader/urls.py b/apps/reader/urls.py index 9f829e59e1..f81ac9873b 100644 --- a/apps/reader/urls.py +++ b/apps/reader/urls.py @@ -2,67 +2,85 @@ from apps.reader import views urlpatterns = [ - url(r'^$', views.index), - url(r'^buster', views.iframe_buster, name='iframe-buster'), - url(r'^login_as', views.login_as, name='login_as'), - url(r'^welcome', views.welcome_req, name='welcome'), - url(r'^logout', views.logout, name='welcome-logout'), - url(r'^login', views.login, name='welcome-login'), - url(r'^autologin/(?P\w+)/(?P\w+)/?', views.autologin, name='autologin'), - url(r'^signup', views.signup, name='welcome-signup'), - url(r'^feeds/?$', views.load_feeds, name='load-feeds'), - url(r'^feed/(?P\d+)', views.load_single_feed, name='load-single-feed'), - url(r'^page/(?P\d+)', views.load_feed_page, name='load-feed-page'), - url(r'^refresh_feed/(?P\d+)', 
views.refresh_feed, name='refresh-feed'),
-    url(r'^favicons', views.load_feed_favicons, name='load-feed-favicons'),
-    url(r'^river_stories_widget', views.load_river_stories_widget, name='load-river-stories-widget'),
-    url(r'^river_stories', views.load_river_stories__redis, name='load-river-stories'),
-    url(r'^complete_river', views.complete_river, name='complete-river'),
-    url(r'^refresh_feeds', views.refresh_feeds, name='refresh-feeds'),
-    url(r'^interactions_count', views.interactions_count, name='interactions-count'),
-    url(r'^feed_unread_count', views.feed_unread_count, name='feed-unread-count'),
-    url(r'^starred_stories', views.load_starred_stories, name='load-starred-stories'),
-    url(r'^read_stories', views.load_read_stories, name='load-read-stories'),
-    url(r'^starred_story_hashes', views.starred_story_hashes, name='starred-story-hashes'),
-    url(r'^starred_rss/(?P<user_id>\d+)/(?P<secret_token>\w+)/?$', views.starred_stories_rss_feed, name='starred-stories-rss-feed'),
-    url(r'^starred_rss/(?P<user_id>\d+)/(?P<secret_token>\w+)/(?P<tag_slug>[-\w]+)?/?$', views.starred_stories_rss_feed_tag, name='starred-stories-rss-feed-tag'),
-    url(r'^folder_rss/(?P<user_id>\d+)/(?P<secret_token>\w+)/(?P<unread_filter>\w+)/(?P<folder_slug>[-\w]+)?/?$', views.folder_rss_feed, name='folder-rss-feed'),
-    url(r'^unread_story_hashes', views.unread_story_hashes, name='unread-story-hashes'),
-    url(r'^starred_counts', views.starred_counts, name='starred-counts'),
-    url(r'^mark_all_as_read', views.mark_all_as_read, name='mark-all-as-read'),
-    url(r'^mark_story_as_read', views.mark_story_as_read, name='mark-story-as-read'),
-    url(r'^mark_story_hashes_as_read', views.mark_story_hashes_as_read, name='mark-story-hashes-as-read'),
-    url(r'^mark_feed_stories_as_read', views.mark_feed_stories_as_read, name='mark-feed-stories-as-read'),
-    url(r'^mark_social_stories_as_read', views.mark_social_stories_as_read, name='mark-social-stories-as-read'),
-    url(r'^mark_story_as_unread', views.mark_story_as_unread),
-    url(r'^mark_story_hash_as_unread', views.mark_story_hash_as_unread, name='mark-story-hash-as-unread'),
-    url(r'^mark_story_as_starred', views.mark_story_as_starred),
-    url(r'^mark_story_hash_as_starred', views.mark_story_hash_as_starred),
-    url(r'^mark_story_as_unstarred', views.mark_story_as_unstarred),
-    url(r'^mark_story_hash_as_unstarred', views.mark_story_hash_as_unstarred),
-    url(r'^mark_feed_as_read', views.mark_feed_as_read),
-    url(r'^delete_feed_by_url', views.delete_feed_by_url, name='delete-feed-by-url'),
-    url(r'^delete_feeds_by_folder', views.delete_feeds_by_folder, name='delete-feeds-by-folder'),
-    url(r'^delete_feed', views.delete_feed, name='delete-feed'),
-    url(r'^delete_folder', views.delete_folder, name='delete-folder'),
-    url(r'^rename_feed', views.rename_feed, name='rename-feed'),
-    url(r'^rename_folder', views.rename_folder, name='rename-folder'),
-    url(r'^move_feed_to_folders', views.move_feed_to_folders, name='move-feed-to-folders'),
-    url(r'^move_feed_to_folder', views.move_feed_to_folder, name='move-feed-to-folder'),
-    url(r'^move_folder_to_folder', views.move_folder_to_folder, name='move-folder-to-folder'),
-    url(r'^move_feeds_by_folder_to_folder', views.move_feeds_by_folder_to_folder, name='move-feeds-by-folder-to-folder'),
-    url(r'^add_url', views.add_url),
-    url(r'^add_folder', views.add_folder),
-    url(r'^add_feature', views.add_feature, name='add-feature'),
-    url(r'^features', views.load_features, name='load-features'),
-    url(r'^save_feed_order', views.save_feed_order, name='save-feed-order'),
-    url(r'^feeds_trainer', views.feeds_trainer, name='feeds-trainer'),
-    
url(r'^save_feed_chooser', views.save_feed_chooser, name='save-feed-chooser'),
-    url(r'^send_story_email', views.send_story_email, name='send-story-email'),
-    url(r'^retrain_all_sites', views.retrain_all_sites, name='retrain-all-sites'),
-    url(r'^load_tutorial', views.load_tutorial, name='load-tutorial'),
-    url(r'^save_search', views.save_search, name='save-search'),
-    url(r'^delete_search', views.delete_search, name='delete-search'),
-    url(r'^save_dashboard_river', views.save_dashboard_river, name='save-dashboard-river'),
-    url(r'^remove_dashboard_river', views.remove_dashboard_river, name='remove-dashboard-river'),
+    url(r"^$", views.index),
+    url(r"^buster", views.iframe_buster, name="iframe-buster"),
+    url(r"^login_as", views.login_as, name="login_as"),
+    url(r"^welcome", views.welcome_req, name="welcome"),
+    url(r"^logout", views.logout, name="welcome-logout"),
+    url(r"^login", views.login, name="welcome-login"),
+    url(r"^autologin/(?P<username>\w+)/(?P<secret>\w+)/?", views.autologin, name="autologin"),
+    url(r"^signup", views.signup, name="welcome-signup"),
+    url(r"^feeds/?$", views.load_feeds, name="load-feeds"),
+    url(r"^feed/(?P<feed_id>\d+)", views.load_single_feed, name="load-single-feed"),
+    url(r"^page/(?P<feed_id>\d+)", views.load_feed_page, name="load-feed-page"),
+    url(r"^refresh_feed/(?P<feed_id>\d+)", views.refresh_feed, name="refresh-feed"),
+    url(r"^favicons", views.load_feed_favicons, name="load-feed-favicons"),
+    url(r"^river_stories_widget", views.load_river_stories_widget, name="load-river-stories-widget"),
+    url(r"^river_stories", views.load_river_stories__redis, name="load-river-stories"),
+    url(r"^complete_river", views.complete_river, name="complete-river"),
+    url(r"^refresh_feeds", views.refresh_feeds, name="refresh-feeds"),
+    url(r"^interactions_count", views.interactions_count, name="interactions-count"),
+    url(r"^feed_unread_count", views.feed_unread_count, name="feed-unread-count"),
+    url(r"^starred_stories", views.load_starred_stories, name="load-starred-stories"),
+    url(r"^read_stories", views.load_read_stories, name="load-read-stories"),
+    url(r"^starred_story_hashes", views.starred_story_hashes, name="starred-story-hashes"),
+    url(
+        r"^starred_rss/(?P<user_id>\d+)/(?P<secret_token>\w+)/?$",
+        views.starred_stories_rss_feed,
+        name="starred-stories-rss-feed",
+    ),
+    url(
+        r"^starred_rss/(?P<user_id>\d+)/(?P<secret_token>\w+)/(?P<tag_slug>[-\w]+)?/?$",
+        views.starred_stories_rss_feed_tag,
+        name="starred-stories-rss-feed-tag",
+    ),
+    url(
+        r"^folder_rss/(?P<user_id>\d+)/(?P<secret_token>\w+)/(?P<unread_filter>\w+)/(?P<folder_slug>[-\w]+)?/?$",
+        views.folder_rss_feed,
+        name="folder-rss-feed",
+    ),
+    url(r"^unread_story_hashes", views.unread_story_hashes, name="unread-story-hashes"),
+    url(r"^starred_counts", views.starred_counts, name="starred-counts"),
+    url(r"^mark_all_as_read", views.mark_all_as_read, name="mark-all-as-read"),
+    url(r"^mark_story_as_read", views.mark_story_as_read, name="mark-story-as-read"),
+    url(r"^mark_story_hashes_as_read", views.mark_story_hashes_as_read, name="mark-story-hashes-as-read"),
+    url(r"^mark_feed_stories_as_read", views.mark_feed_stories_as_read, name="mark-feed-stories-as-read"),
+    url(
+        r"^mark_social_stories_as_read", views.mark_social_stories_as_read, name="mark-social-stories-as-read"
+    ),
+    url(r"^mark_story_as_unread", views.mark_story_as_unread),
+    url(r"^mark_story_hash_as_unread", views.mark_story_hash_as_unread, name="mark-story-hash-as-unread"),
+    url(r"^mark_story_as_starred", views.mark_story_as_starred),
+    url(r"^mark_story_hash_as_starred", views.mark_story_hash_as_starred),
+    url(r"^mark_story_as_unstarred", views.mark_story_as_unstarred),
+    
url(r"^mark_story_hash_as_unstarred", views.mark_story_hash_as_unstarred), + url(r"^mark_feed_as_read", views.mark_feed_as_read), + url(r"^delete_feed_by_url", views.delete_feed_by_url, name="delete-feed-by-url"), + url(r"^delete_feeds_by_folder", views.delete_feeds_by_folder, name="delete-feeds-by-folder"), + url(r"^delete_feed", views.delete_feed, name="delete-feed"), + url(r"^delete_folder", views.delete_folder, name="delete-folder"), + url(r"^rename_feed", views.rename_feed, name="rename-feed"), + url(r"^rename_folder", views.rename_folder, name="rename-folder"), + url(r"^move_feed_to_folders", views.move_feed_to_folders, name="move-feed-to-folders"), + url(r"^move_feed_to_folder", views.move_feed_to_folder, name="move-feed-to-folder"), + url(r"^move_folder_to_folder", views.move_folder_to_folder, name="move-folder-to-folder"), + url( + r"^move_feeds_by_folder_to_folder", + views.move_feeds_by_folder_to_folder, + name="move-feeds-by-folder-to-folder", + ), + url(r"^add_url", views.add_url), + url(r"^add_folder", views.add_folder), + url(r"^add_feature", views.add_feature, name="add-feature"), + url(r"^features", views.load_features, name="load-features"), + url(r"^save_feed_order", views.save_feed_order, name="save-feed-order"), + url(r"^feeds_trainer", views.feeds_trainer, name="feeds-trainer"), + url(r"^save_feed_chooser", views.save_feed_chooser, name="save-feed-chooser"), + url(r"^send_story_email", views.send_story_email, name="send-story-email"), + url(r"^retrain_all_sites", views.retrain_all_sites, name="retrain-all-sites"), + url(r"^load_tutorial", views.load_tutorial, name="load-tutorial"), + url(r"^save_search", views.save_search, name="save-search"), + url(r"^delete_search", views.delete_search, name="delete-search"), + url(r"^save_dashboard_river", views.save_dashboard_river, name="save-dashboard-river"), + url(r"^remove_dashboard_river", views.remove_dashboard_river, name="remove-dashboard-river"), ] diff --git a/apps/reader/views.py b/apps/reader/views.py index 29a316ec42..2c15b0f560 100644 --- a/apps/reader/views.py +++ b/apps/reader/views.py @@ -113,229 +113,234 @@ "brentozar.com", ] ALLOWED_SUBDOMAINS = [ - 'dev', - 'www', - 'hwww', - 'dwww', + "dev", + "www", + "hwww", + "dwww", # 'beta', # Comment to redirect beta -> www, uncomment to allow beta -> staging (+ dns changes) - 'staging', - 'hstaging', - 'discovery', - 'debug', - 'debug3', - 'staging2', - 'staging3', - 'nb', + "staging", + "hstaging", + "discovery", + "debug", + "debug3", + "staging2", + "staging3", + "nb", ] + def get_subdomain(request): - host = request.META.get('HTTP_HOST') + host = request.META.get("HTTP_HOST") if host and host.count(".") >= 2: return host.split(".")[0] else: return None + @never_cache -@render_to('reader/dashboard.xhtml') +@render_to("reader/dashboard.xhtml") def index(request, **kwargs): - subdomain = get_subdomain(request) if request.method == "GET" and subdomain and subdomain not in ALLOWED_SUBDOMAINS: username = request.subdomain or subdomain - if '.' in username: - username = username.split('.')[0] + if "." 
in username: + username = username.split(".")[0] user = User.objects.filter(username=username) if not user: user = User.objects.filter(username__iexact=username) if user: user = user[0] if not user: - return HttpResponseRedirect('http://%s%s' % ( - Site.objects.get_current().domain, - reverse('index'))) + return HttpResponseRedirect("http://%s%s" % (Site.objects.get_current().domain, reverse("index"))) return load_social_page(request, user_id=user.pk, username=request.subdomain, **kwargs) if request.user.is_anonymous: return welcome(request, **kwargs) else: return dashboard(request, **kwargs) + def dashboard(request, **kwargs): - user = request.user - feed_count = UserSubscription.objects.filter(user=request.user).count() + user = request.user + feed_count = UserSubscription.objects.filter(user=request.user).count() # recommended_feeds = RecommendedFeed.objects.filter(is_public=True, # approved_date__lte=datetime.datetime.now() # ).select_related('feed')[:2] unmoderated_feeds = [] if user.is_staff: - unmoderated_feeds = RecommendedFeed.objects.filter(is_public=False, - declined_date__isnull=True - ).select_related('feed')[:2] - statistics = MStatistics.all() - social_profile = MSocialProfile.get_user(user.pk) - custom_styling = MCustomStyling.get_user(user.pk) - dashboard_rivers = MDashboardRiver.get_user_rivers(user.pk) - preferences = json.decode(user.profile.preferences) - + unmoderated_feeds = RecommendedFeed.objects.filter( + is_public=False, declined_date__isnull=True + ).select_related("feed")[:2] + statistics = MStatistics.all() + social_profile = MSocialProfile.get_user(user.pk) + custom_styling = MCustomStyling.get_user(user.pk) + dashboard_rivers = MDashboardRiver.get_user_rivers(user.pk) + preferences = json.decode(user.profile.preferences) + if not user.is_active: - url = "https://%s%s" % (Site.objects.get_current().domain, - reverse('stripe-form')) + url = "https://%s%s" % (Site.objects.get_current().domain, reverse("stripe-form")) return HttpResponseRedirect(url) logging.user(request, "~FBLoading dashboard") return { - 'user_profile' : user.profile, - 'preferences' : preferences, - 'feed_count' : feed_count, - 'custom_styling' : custom_styling, - 'dashboard_rivers' : dashboard_rivers, - 'account_images' : list(range(1, 4)), + "user_profile": user.profile, + "preferences": preferences, + "feed_count": feed_count, + "custom_styling": custom_styling, + "dashboard_rivers": dashboard_rivers, + "account_images": list(range(1, 4)), # 'recommended_feeds' : recommended_feeds, - 'unmoderated_feeds' : unmoderated_feeds, - 'statistics' : statistics, - 'social_profile' : social_profile, - 'debug' : settings.DEBUG, - 'debug_assets' : settings.DEBUG_ASSETS, + "unmoderated_feeds": unmoderated_feeds, + "statistics": statistics, + "social_profile": social_profile, + "debug": settings.DEBUG, + "debug_assets": settings.DEBUG_ASSETS, }, "reader/dashboard.xhtml" -@render_to('reader/dashboard.xhtml') + +@render_to("reader/dashboard.xhtml") def welcome_req(request, **kwargs): return welcome(request, **kwargs) + def welcome(request, **kwargs): - user = get_user(request) - statistics = MStatistics.all() - social_profile = MSocialProfile.get_user(user.pk) - + user = get_user(request) + statistics = MStatistics.all() + social_profile = MSocialProfile.get_user(user.pk) + if request.method == "POST": - if request.POST.get('submit', '').startswith('log'): - login_form = LoginForm(request.POST, prefix='login') - signup_form = SignupForm(prefix='signup') + if request.POST.get("submit", 
"").startswith("log"): + login_form = LoginForm(request.POST, prefix="login") + signup_form = SignupForm(prefix="signup") else: - signup_form = SignupForm(request.POST, prefix='signup') - return { - "form": signup_form - }, "accounts/signup.html" + signup_form = SignupForm(request.POST, prefix="signup") + return {"form": signup_form}, "accounts/signup.html" else: - login_form = LoginForm(prefix='login') - signup_form = SignupForm(prefix='signup') - + login_form = LoginForm(prefix="login") + signup_form = SignupForm(prefix="signup") + logging.user(request, "~FBLoading welcome") - + return { - 'user_profile' : hasattr(user, 'profile') and user.profile, - 'login_form' : login_form, - 'signup_form' : signup_form, - 'statistics' : statistics, - 'social_profile' : social_profile, - 'post_request' : request.method == 'POST', + "user_profile": hasattr(user, "profile") and user.profile, + "login_form": login_form, + "signup_form": signup_form, + "statistics": statistics, + "social_profile": social_profile, + "post_request": request.method == "POST", }, "reader/welcome.xhtml" + @never_cache def login(request): code = -1 message = "" if request.method == "POST": - form = LoginForm(request.POST, prefix='login') + form = LoginForm(request.POST, prefix="login") if form.is_valid(): - login_user(request, form.get_user(), backend='django.contrib.auth.backends.ModelBackend') - if request.POST.get('api'): + login_user(request, form.get_user(), backend="django.contrib.auth.backends.ModelBackend") + if request.POST.get("api"): logging.user(form.get_user(), "~FG~BB~SKiPhone Login~FW") code = 1 else: logging.user(form.get_user(), "~FG~BBLogin~FW") - next_url = request.POST.get('next', '') + next_url = request.POST.get("next", "") if next_url: return HttpResponseRedirect(next_url) - return HttpResponseRedirect(reverse('index')) + return HttpResponseRedirect(reverse("index")) else: message = list(form.errors.items())[0][1][0] - if request.POST.get('api'): - return HttpResponse(json.encode(dict(code=code, message=message)), content_type='application/json') + if request.POST.get("api"): + return HttpResponse(json.encode(dict(code=code, message=message)), content_type="application/json") else: return index(request) - + + @never_cache -@render_to('accounts/signup.html') +@render_to("accounts/signup.html") def signup(request): if request.method == "POST": if settings.ENFORCE_SIGNUP_CAPTCHA: - signup_form = SignupForm(request.POST, prefix='signup') - return { - "form": signup_form - } + signup_form = SignupForm(request.POST, prefix="signup") + return {"form": signup_form} - form = SignupForm(prefix='signup', data=request.POST) + form = SignupForm(prefix="signup", data=request.POST) if form.is_valid(): new_user = form.save() - login_user(request, new_user, backend='django.contrib.auth.backends.ModelBackend') + login_user(request, new_user, backend="django.contrib.auth.backends.ModelBackend") logging.user(new_user, "~FG~SB~BBNEW SIGNUP: ~FW%s" % new_user.email) if not new_user.is_active: - url = "https://%s%s" % (Site.objects.get_current().domain, - reverse('stripe-form')) + url = "https://%s%s" % (Site.objects.get_current().domain, reverse("stripe-form")) return HttpResponseRedirect(url) else: - return HttpResponseRedirect(reverse('index')) - + return HttpResponseRedirect(reverse("index")) + return index(request) - + + @never_cache def logout(request): logging.user(request, "~FG~BBLogout~FW") logout_user(request) - - if request.GET.get('api'): - return HttpResponse(json.encode(dict(code=1)), 
content_type='application/json') + + if request.GET.get("api"): + return HttpResponse(json.encode(dict(code=1)), content_type="application/json") else: - return HttpResponseRedirect(reverse('index')) + return HttpResponseRedirect(reverse("index")) + def autologin(request, username, secret): - next = request.GET.get('next', '') - + next = request.GET.get("next", "") + if not username or not secret: return HttpResponseForbidden() - + profile = Profile.objects.filter(user__username=username, secret_token=secret) if not profile: return HttpResponseForbidden() user = profile[0].user user.backend = settings.AUTHENTICATION_BACKENDS[0] - login_user(request, user, backend='django.contrib.auth.backends.ModelBackend') - logging.user(user, "~FG~BB~SKAuto-Login. Next stop: %s~FW" % (next if next else 'Homepage',)) - - if next and not next.startswith('/'): - next = '?next=' + next - return HttpResponseRedirect(reverse('index') + next) + login_user(request, user, backend="django.contrib.auth.backends.ModelBackend") + logging.user(user, "~FG~BB~SKAuto-Login. Next stop: %s~FW" % (next if next else "Homepage",)) + + if next and not next.startswith("/"): + next = "?next=" + next + return HttpResponseRedirect(reverse("index") + next) elif next: return HttpResponseRedirect(next) else: - return HttpResponseRedirect(reverse('index')) - + return HttpResponseRedirect(reverse("index")) + + @ratelimit(minutes=1, requests=60) @never_cache @json.json_view def load_feeds(request): - user = get_user(request) - feeds = {} - include_favicons = is_true(request.GET.get('include_favicons', False)) - flat = is_true(request.GET.get('flat', False)) - update_counts = is_true(request.GET.get('update_counts', True)) - version = int(request.GET.get('v', 1)) - - if include_favicons == 'false': include_favicons = False - if update_counts == 'false': update_counts = False - if flat == 'false': flat = False - - if flat: return load_feeds_flat(request) + user = get_user(request) + feeds = {} + include_favicons = is_true(request.GET.get("include_favicons", False)) + flat = is_true(request.GET.get("flat", False)) + update_counts = is_true(request.GET.get("update_counts", True)) + version = int(request.GET.get("v", 1)) + + if include_favicons == "false": + include_favicons = False + if update_counts == "false": + update_counts = False + if flat == "false": + flat = False + + if flat: + return load_feeds_flat(request) platform = extract_user_agent(request) - if platform in ['iPhone', 'iPad', 'Androd']: + if platform in ["iPhone", "iPad", "Androd"]: # Remove this check once the iOS and Android updates go out which have update_counts=False # and then guarantee a refresh_feeds call update_counts = False - + try: folders = UserSubscriptionFolders.objects.get(user=user) except UserSubscriptionFolders.DoesNotExist: @@ -344,10 +349,10 @@ def load_feeds(request): except UserSubscriptionFolders.MultipleObjectsReturned: UserSubscriptionFolders.objects.filter(user=user)[1:].delete() folders = UserSubscriptionFolders.objects.get(user=user) - - user_subs = UserSubscription.objects.select_related('feed').filter(user=user) + + user_subs = UserSubscription.objects.select_related("feed").filter(user=user) notifications = MUserFeedNotification.feeds_for_user(user.pk) - + day_ago = datetime.datetime.now() - datetime.timedelta(days=1) scheduled_feeds = [] for sub in user_subs: @@ -355,8 +360,9 @@ def load_feeds(request): if update_counts and sub.needs_unread_recalc: sub.calculate_feed_scores(silent=True) feeds[pk] = 
sub.canonical(include_favicon=include_favicons) - - if not sub.active: continue + + if not sub.active: + continue if pk in notifications: feeds[pk].update(notifications[pk]) if not sub.feed.active and not sub.feed.has_feed_exception: @@ -365,22 +371,24 @@ def load_feeds(request): scheduled_feeds.append(sub.feed.pk) elif sub.feed.next_scheduled_update < day_ago: scheduled_feeds.append(sub.feed.pk) - + if len(scheduled_feeds) > 0 and request.user.is_authenticated: - logging.user(request, "~SN~FMTasking the scheduling immediate fetch of ~SB%s~SN feeds..." % - len(scheduled_feeds)) + logging.user( + request, + "~SN~FMTasking the scheduling immediate fetch of ~SB%s~SN feeds..." % len(scheduled_feeds), + ) ScheduleImmediateFetches.apply_async(kwargs=dict(feed_ids=scheduled_feeds, user_id=user.pk)) starred_counts, starred_count = MStarredStoryCounts.user_counts(user.pk, include_total=True) if not starred_count and len(starred_counts): starred_count = MStarredStory.objects(user_id=user.pk).count() - + saved_searches = MSavedSearch.user_searches(user.pk) - + social_params = { - 'user_id': user.pk, - 'include_favicon': include_favicons, - 'update_counts': update_counts, + "user_id": user.pk, + "include_favicon": include_favicons, + "update_counts": update_counts, } social_feeds = MSocialSubscription.feeds(**social_params) social_profile = MSocialProfile.profile(user.pk) @@ -391,74 +399,81 @@ def load_feeds(request): if not user_subs: categories = MCategory.serialize() - logging.user(request, "~FB~SBLoading ~FY%s~FB/~FM%s~FB feeds/socials%s" % ( - len(list(feeds.keys())), len(social_feeds), '. ~FCUpdating counts.' if update_counts else '')) + logging.user( + request, + "~FB~SBLoading ~FY%s~FB/~FM%s~FB feeds/socials%s" + % (len(list(feeds.keys())), len(social_feeds), ". ~FCUpdating counts." 
if update_counts else ""), + ) data = { - 'feeds': list(feeds.values()) if version == 2 else feeds, - 'social_feeds': social_feeds, - 'social_profile': social_profile, - 'social_services': social_services, - 'user_profile': user.profile, + "feeds": list(feeds.values()) if version == 2 else feeds, + "social_feeds": social_feeds, + "social_profile": social_profile, + "social_services": social_services, + "user_profile": user.profile, "is_staff": user.is_staff, - 'user_id': user.pk, - 'folders': json.decode(folders.folders), - 'starred_count': starred_count, - 'starred_counts': starred_counts, - 'saved_searches': saved_searches, - 'dashboard_rivers': dashboard_rivers, - 'categories': categories, - 'share_ext_token': user.profile.secret_token, + "user_id": user.pk, + "folders": json.decode(folders.folders), + "starred_count": starred_count, + "starred_counts": starred_counts, + "saved_searches": saved_searches, + "dashboard_rivers": dashboard_rivers, + "categories": categories, + "share_ext_token": user.profile.secret_token, } return data + @json.json_view def load_feed_favicons(request): user = get_user(request) - feed_ids = request.GET.getlist('feed_ids') or request.GET.getlist('feed_ids[]') - + feed_ids = request.GET.getlist("feed_ids") or request.GET.getlist("feed_ids[]") + if not feed_ids: - user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True) - feed_ids = [sub['feed__pk'] for sub in user_subs.values('feed__pk')] + user_subs = UserSubscription.objects.select_related("feed").filter(user=user, active=True) + feed_ids = [sub["feed__pk"] for sub in user_subs.values("feed__pk")] feed_icons = dict([(i.feed_id, i.data) for i in MFeedIcon.objects(feed_id__in=feed_ids)]) - + return feed_icons + def load_feeds_flat(request): user = request.user - include_favicons = is_true(request.GET.get('include_favicons', False)) - update_counts = is_true(request.GET.get('update_counts', True)) - include_inactive = is_true(request.GET.get('include_inactive', False)) - background_ios = is_true(request.GET.get('background_ios', False)) - + include_favicons = is_true(request.GET.get("include_favicons", False)) + update_counts = is_true(request.GET.get("update_counts", True)) + include_inactive = is_true(request.GET.get("include_inactive", False)) + background_ios = is_true(request.GET.get("background_ios", False)) + feeds = {} inactive_feeds = {} day_ago = datetime.datetime.now() - datetime.timedelta(days=1) scheduled_feeds = [] - iphone_version = "2.1" # Preserved forever. Don't change. + iphone_version = "2.1" # Preserved forever. Don't change. 
latest_ios_build = "52" latest_ios_version = "5.0.0b2" - - if include_favicons == 'false': include_favicons = False - if update_counts == 'false': update_counts = False - + + if include_favicons == "false": + include_favicons = False + if update_counts == "false": + update_counts = False + if not user.is_authenticated: return HttpResponseForbidden() - + try: folders = UserSubscriptionFolders.objects.get(user=user) except UserSubscriptionFolders.DoesNotExist: folders = [] - - user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True) + + user_subs = UserSubscription.objects.select_related("feed").filter(user=user, active=True) notifications = MUserFeedNotification.feeds_for_user(user.pk) if not user_subs and folders: folders.auto_activate() - user_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=True) + user_subs = UserSubscription.objects.select_related("feed").filter(user=user, active=True) if include_inactive: - inactive_subs = UserSubscription.objects.select_related('feed').filter(user=user, active=False) - + inactive_subs = UserSubscription.objects.select_related("feed").filter(user=user, active=False) + for sub in user_subs: pk = sub.feed_id if update_counts and sub.needs_unread_recalc: @@ -472,28 +487,28 @@ def load_feeds_flat(request): scheduled_feeds.append(sub.feed.pk) if pk in notifications: feeds[pk].update(notifications[pk]) - - + if include_inactive: for sub in inactive_subs: inactive_feeds[sub.feed_id] = sub.canonical(include_favicon=include_favicons) - + if len(scheduled_feeds) > 0 and request.user.is_authenticated: - logging.user(request, "~SN~FMTasking the scheduling immediate fetch of ~SB%s~SN feeds..." % - len(scheduled_feeds)) + logging.user( + request, + "~SN~FMTasking the scheduling immediate fetch of ~SB%s~SN feeds..." % len(scheduled_feeds), + ) ScheduleImmediateFetches.apply_async(kwargs=dict(feed_ids=scheduled_feeds, user_id=user.pk)) - + flat_folders = [] flat_folders_with_inactive = [] if folders: flat_folders = folders.flatten_folders(feeds=feeds) - flat_folders_with_inactive = folders.flatten_folders(feeds=feeds, - inactive_feeds=inactive_feeds) - + flat_folders_with_inactive = folders.flatten_folders(feeds=feeds, inactive_feeds=inactive_feeds) + social_params = { - 'user_id': user.pk, - 'include_favicon': include_favicons, - 'update_counts': update_counts, + "user_id": user.pk, + "include_favicon": include_favicons, + "update_counts": update_counts, } social_feeds = MSocialSubscription.feeds(**social_params) social_profile = MSocialProfile.profile(user.pk) @@ -508,13 +523,21 @@ def load_feeds_flat(request): saved_searches = MSavedSearch.user_searches(user.pk) - logging.user(request, "~FB~SBLoading ~FY%s~FB/~FM%s~FB/~FR%s~FB feeds/socials/inactive ~FMflat~FB%s%s" % ( - len(list(feeds.keys())), len(social_feeds), len(inactive_feeds), '. ~FCUpdating counts.' if update_counts else '', - ' ~BB(background fetch)' if background_ios else '')) + logging.user( + request, + "~FB~SBLoading ~FY%s~FB/~FM%s~FB/~FR%s~FB feeds/socials/inactive ~FMflat~FB%s%s" + % ( + len(list(feeds.keys())), + len(social_feeds), + len(inactive_feeds), + ". ~FCUpdating counts." 
if update_counts else "", + " ~BB(background fetch)" if background_ios else "", + ), + ) data = { - "flat_folders": flat_folders, - "flat_folders_with_inactive": flat_folders_with_inactive, + "flat_folders": flat_folders, + "flat_folders_with_inactive": flat_folders_with_inactive, "feeds": feeds, "inactive_feeds": inactive_feeds if include_inactive else {"0": "Include `include_inactive=true`"}, "social_feeds": social_feeds, @@ -528,20 +551,22 @@ def load_feeds_flat(request): "latest_ios_build": latest_ios_build, "latest_ios_version": latest_ios_version, "categories": categories, - 'starred_count': starred_count, - 'starred_counts': starred_counts, - 'saved_searches': saved_searches, - 'share_ext_token': user.profile.secret_token, + "starred_count": starred_count, + "starred_counts": starred_counts, + "saved_searches": saved_searches, + "share_ext_token": user.profile.secret_token, } return data + class ratelimit_refresh_feeds(ratelimit): def should_ratelimit(self, request): - feed_ids = request.POST.getlist('feed_id') or request.POST.getlist('feed_id[]') + feed_ids = request.POST.getlist("feed_id") or request.POST.getlist("feed_id[]") if len(feed_ids) == 1: return False return True + @ratelimit_refresh_feeds(minutes=1, requests=30) @never_cache @json.json_view @@ -550,33 +575,34 @@ def refresh_feeds(request): start = datetime.datetime.now() start_time = time.time() user = get_user(request) - feed_ids = get_post.getlist('feed_id') or get_post.getlist('feed_id[]') - check_fetch_status = get_post.get('check_fetch_status') - favicons_fetching = get_post.getlist('favicons_fetching') or get_post.getlist('favicons_fetching[]') - social_feed_ids = [feed_id for feed_id in feed_ids if 'social:' in feed_id] + feed_ids = get_post.getlist("feed_id") or get_post.getlist("feed_id[]") + check_fetch_status = get_post.get("check_fetch_status") + favicons_fetching = get_post.getlist("favicons_fetching") or get_post.getlist("favicons_fetching[]") + social_feed_ids = [feed_id for feed_id in feed_ids if "social:" in feed_id] feed_ids = list(set(feed_ids) - set(social_feed_ids)) - + feeds = {} if feed_ids or (not social_feed_ids and not feed_ids): - feeds = UserSubscription.feeds_with_updated_counts(user, feed_ids=feed_ids, - check_fetch_status=check_fetch_status) + feeds = UserSubscription.feeds_with_updated_counts( + user, feed_ids=feed_ids, check_fetch_status=check_fetch_status + ) checkpoint1 = datetime.datetime.now() social_feeds = {} if social_feed_ids or (not social_feed_ids and not feed_ids): social_feeds = MSocialSubscription.feeds_with_updated_counts(user, social_feed_ids=social_feed_ids) checkpoint2 = datetime.datetime.now() - + favicons_fetching = [int(f) for f in favicons_fetching if f] feed_icons = {} if favicons_fetching: feed_icons = dict([(i.feed_id, i) for i in MFeedIcon.objects(feed_id__in=favicons_fetching)]) for feed_id, feed in list(feeds.items()): if feed_id in favicons_fetching and feed_id in feed_icons: - feeds[feed_id]['favicon'] = feed_icons[feed_id].data - feeds[feed_id]['favicon_color'] = feed_icons[feed_id].color - feeds[feed_id]['favicon_fetching'] = feed.get('favicon_fetching') + feeds[feed_id]["favicon"] = feed_icons[feed_id].data + feeds[feed_id]["favicon_color"] = feed_icons[feed_id].color + feeds[feed_id]["favicon_fetching"] = feed.get("favicon_fetching") - user_subs = UserSubscription.objects.filter(user=user, active=True).only('feed') + user_subs = UserSubscription.objects.filter(user=user, active=True).only("feed") sub_feed_ids = [s.feed_id for s in user_subs] if 
favicons_fetching: @@ -586,15 +612,15 @@ def refresh_feeds(request): if duplicate_feeds and duplicate_feeds[0].feed.pk in feeds: feeds[moved_feed_id] = feeds[duplicate_feeds[0].feed_id] - feeds[moved_feed_id]['dupe_feed_id'] = duplicate_feeds[0].feed_id - + feeds[moved_feed_id]["dupe_feed_id"] = duplicate_feeds[0].feed_id + if check_fetch_status: missing_feed_ids = list(set(feed_ids) - set(sub_feed_ids)) if missing_feed_ids: duplicate_feeds = DuplicateFeed.objects.filter(duplicate_feed_id__in=missing_feed_ids) for duplicate_feed in duplicate_feeds: - feeds[duplicate_feed.duplicate_feed_id] = {'id': duplicate_feed.feed_id} - + feeds[duplicate_feed.duplicate_feed_id] = {"id": duplicate_feed.feed_id} + interactions_count = MInteraction.user_unread_count(user.pk) if True or settings.DEBUG or check_fetch_status: @@ -602,21 +628,28 @@ def refresh_feeds(request): extra_fetch = "" if check_fetch_status or favicons_fetching: extra_fetch = "(%s/%s)" % (check_fetch_status, len(favicons_fetching)) - logging.user(request, "~FBRefreshing %s+%s feeds %s (%.4s/%.4s/%.4s)" % ( - len(list(feeds.keys())), len(list(social_feeds.keys())), extra_fetch, - (checkpoint1-start).total_seconds(), - (checkpoint2-start).total_seconds(), - (end-start).total_seconds(), - )) - - MAnalyticsLoader.add(page_load=time.time()-start_time) - + logging.user( + request, + "~FBRefreshing %s+%s feeds %s (%.4s/%.4s/%.4s)" + % ( + len(list(feeds.keys())), + len(list(social_feeds.keys())), + extra_fetch, + (checkpoint1 - start).total_seconds(), + (checkpoint2 - start).total_seconds(), + (end - start).total_seconds(), + ), + ) + + MAnalyticsLoader.add(page_load=time.time() - start_time) + return { - 'feeds': feeds, - 'social_feeds': social_feeds, - 'interactions_count': interactions_count, + "feeds": feeds, + "social_feeds": social_feeds, + "interactions_count": interactions_count, } + @json.json_view def interactions_count(request): user = get_user(request) @@ -624,9 +657,10 @@ def interactions_count(request): interactions_count = MInteraction.user_unread_count(user.pk) return { - 'interactions_count': interactions_count, + "interactions_count": interactions_count, } - + + @never_cache @ajax_login_required @json.json_view @@ -634,12 +668,12 @@ def feed_unread_count(request): get_post = getattr(request, request.method) start = time.time() user = request.user - feed_ids = get_post.getlist('feed_id') or get_post.getlist('feed_id[]') - - force = request.GET.get('force', False) - social_feed_ids = [feed_id for feed_id in feed_ids if 'social:' in feed_id] + feed_ids = get_post.getlist("feed_id") or get_post.getlist("feed_id[]") + + force = request.GET.get("force", False) + social_feed_ids = [feed_id for feed_id in feed_ids if "social:" in feed_id] feed_ids = list(set(feed_ids) - set(social_feed_ids)) - + feeds = {} if feed_ids: feeds = UserSubscription.feeds_with_updated_counts(user, feed_ids=feed_ids, force=force) @@ -647,71 +681,74 @@ def feed_unread_count(request): social_feeds = {} if social_feed_ids: social_feeds = MSocialSubscription.feeds_with_updated_counts(user, social_feed_ids=social_feed_ids) - + if len(feed_ids) == 1: if settings.DEBUG: feed_title = Feed.get_by_id(feed_ids[0]).feed_title else: feed_title = feed_ids[0] elif len(social_feed_ids) == 1: - social_profile = MSocialProfile.objects.get(user_id=social_feed_ids[0].replace('social:', '')) + social_profile = MSocialProfile.objects.get(user_id=social_feed_ids[0].replace("social:", "")) feed_title = social_profile.user.username if social_profile.user else "[deleted]" else: 
feed_title = "%s feeds" % (len(feeds) + len(social_feeds)) logging.user(request, "~FBUpdating unread count on: %s" % feed_title) - MAnalyticsLoader.add(page_load=time.time()-start) - - return {'feeds': feeds, 'social_feeds': social_feeds} - + MAnalyticsLoader.add(page_load=time.time() - start) + + return {"feeds": feeds, "social_feeds": social_feeds} + + def refresh_feed(request, feed_id): start = time.time() user = get_user(request) feed = get_object_or_404(Feed, pk=feed_id) - + feed = feed.update(force=True, compute_scores=False) usersub = UserSubscription.objects.get(user=user, feed=feed) usersub.calculate_feed_scores(silent=False) - + logging.user(request, "~FBRefreshing feed: %s" % feed) - MAnalyticsLoader.add(page_load=time.time()-start) - + MAnalyticsLoader.add(page_load=time.time() - start) + return load_single_feed(request, feed_id) - + + @never_cache @json.json_view def load_single_feed(request, feed_id): - start = time.time() - user = get_user(request) + start = time.time() + user = get_user(request) # offset = int(request.GET.get('offset', 0)) # limit = int(request.GET.get('limit', 6)) - limit = 6 - page = int(request.GET.get('page', 1)) - delay = int(request.GET.get('delay', 0)) - offset = limit * (page-1) - order = request.GET.get('order', 'newest') - read_filter = request.GET.get('read_filter', 'all') - query = request.GET.get('query', '').strip() - include_story_content = is_true(request.GET.get('include_story_content', True)) - include_hidden = is_true(request.GET.get('include_hidden', False)) - include_feeds = is_true(request.GET.get('include_feeds', False)) - message = None - user_search = None - + limit = 6 + page = int(request.GET.get("page", 1)) + delay = int(request.GET.get("delay", 0)) + offset = limit * (page - 1) + order = request.GET.get("order", "newest") + read_filter = request.GET.get("read_filter", "all") + query = request.GET.get("query", "").strip() + include_story_content = is_true(request.GET.get("include_story_content", True)) + include_hidden = is_true(request.GET.get("include_hidden", False)) + include_feeds = is_true(request.GET.get("include_feeds", False)) + message = None + user_search = None + dupe_feed_id = None user_profiles = [] now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone) - if not feed_id: raise Http404 + if not feed_id: + raise Http404 - feed_address = request.GET.get('feed_address') + feed_address = request.GET.get("feed_address") feed = Feed.get_by_id(feed_id, feed_address=feed_address) if not feed: raise Http404 - + try: usersub = UserSubscription.objects.get(user=user, feed=feed) except UserSubscription.DoesNotExist: usersub = None - + if feed.is_newsletter and not usersub: # User must be subscribed to a newsletter in order to read it raise Http404 @@ -719,11 +756,11 @@ def load_single_feed(request, feed_id): if feed.num_subscribers == 1 and not usersub and not user.is_staff: # This feed could be private so user must be subscribed in order to read it raise Http404 - + if page > 400: logging.user(request, "~BR~FK~SBOver page 400 on single feed: %s" % page) raise Http404 - + if query: if user.profile.is_premium: user_search = MUserSearch.get_user(user.pk) @@ -732,178 +769,199 @@ def load_single_feed(request, feed_id): else: stories = [] message = "You must be a premium subscriber to search." 
- elif read_filter == 'starred': - mstories = MStarredStory.objects( - user_id=user.pk, - story_feed_id=feed_id - ).order_by('%sstarred_date' % ('-' if order == 'newest' else ''))[offset:offset+limit] - stories = Feed.format_stories(mstories) - elif usersub and read_filter == 'unread': + elif read_filter == "starred": + mstories = MStarredStory.objects(user_id=user.pk, story_feed_id=feed_id).order_by( + "%sstarred_date" % ("-" if order == "newest" else "") + )[offset : offset + limit] + stories = Feed.format_stories(mstories) + elif usersub and read_filter == "unread": stories = usersub.get_stories(order=order, read_filter=read_filter, offset=offset, limit=limit) else: stories = feed.get_stories(offset, limit, order=order) - + checkpoint1 = time.time() - + try: stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk) except redis.ConnectionError: logging.user(request, "~BR~FK~SBRedis is unavailable for shared stories.") checkpoint2 = time.time() - + # Get intelligence classifier for user - + if usersub and usersub.is_trained: - classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, feed_id=feed_id, social_user_id=0)) + classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, feed_id=feed_id, social_user_id=0)) classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk, feed_id=feed_id)) - classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, feed_id=feed_id)) - classifier_tags = list(MClassifierTag.objects(user_id=user.pk, feed_id=feed_id)) + classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, feed_id=feed_id)) + classifier_tags = list(MClassifierTag.objects(user_id=user.pk, feed_id=feed_id)) else: classifier_feeds = [] classifier_authors = [] classifier_titles = [] classifier_tags = [] - classifiers = get_classifiers_for_user(user, feed_id=feed_id, - classifier_feeds=classifier_feeds, - classifier_authors=classifier_authors, - classifier_titles=classifier_titles, - classifier_tags=classifier_tags) + classifiers = get_classifiers_for_user( + user, + feed_id=feed_id, + classifier_feeds=classifier_feeds, + classifier_authors=classifier_authors, + classifier_titles=classifier_titles, + classifier_tags=classifier_tags, + ) checkpoint3 = time.time() - + unread_story_hashes = [] if stories: - if (read_filter == 'all' or query) and usersub: - unread_story_hashes = UserSubscription.story_hashes(user.pk, read_filter='unread', - feed_ids=[usersub.feed_id], - usersubs=[usersub], - cutoff_date=user.profile.unread_cutoff) - story_hashes = [story['story_hash'] for story in stories if story['story_hash']] - starred_stories = MStarredStory.objects(user_id=user.pk, - story_feed_id=feed.pk, - story_hash__in=story_hashes)\ - .hint([('user_id', 1), ('story_hash', 1)]) + if (read_filter == "all" or query) and usersub: + unread_story_hashes = UserSubscription.story_hashes( + user.pk, + read_filter="unread", + feed_ids=[usersub.feed_id], + usersubs=[usersub], + cutoff_date=user.profile.unread_cutoff, + ) + story_hashes = [story["story_hash"] for story in stories if story["story_hash"]] + starred_stories = MStarredStory.objects( + user_id=user.pk, story_feed_id=feed.pk, story_hash__in=story_hashes + ).hint([("user_id", 1), ("story_hash", 1)]) shared_story_hashes = MSharedStory.check_shared_story_hashes(user.pk, story_hashes) shared_stories = [] if shared_story_hashes: - shared_stories = MSharedStory.objects(user_id=user.pk, - story_hash__in=shared_story_hashes)\ - .hint([('story_hash', 1)])\ - .only('story_hash', 
'shared_date', 'comments') - starred_stories = dict([(story.story_hash, story) - for story in starred_stories]) - shared_stories = dict([(story.story_hash, dict(shared_date=story.shared_date, - comments=story.comments)) - for story in shared_stories]) - + shared_stories = ( + MSharedStory.objects(user_id=user.pk, story_hash__in=shared_story_hashes) + .hint([("story_hash", 1)]) + .only("story_hash", "shared_date", "comments") + ) + starred_stories = dict([(story.story_hash, story) for story in starred_stories]) + shared_stories = dict( + [ + (story.story_hash, dict(shared_date=story.shared_date, comments=story.comments)) + for story in shared_stories + ] + ) + checkpoint4 = time.time() - + for story in stories: if not include_story_content: - del story['story_content'] - story_date = localtime_for_timezone(story['story_date'], user.profile.timezone) + del story["story_content"] + story_date = localtime_for_timezone(story["story_date"], user.profile.timezone) nowtz = localtime_for_timezone(now, user.profile.timezone) - story['short_parsed_date'] = format_story_link_date__short(story_date, nowtz) - story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz) + story["short_parsed_date"] = format_story_link_date__short(story_date, nowtz) + story["long_parsed_date"] = format_story_link_date__long(story_date, nowtz) if usersub: - story['read_status'] = 1 - if not user.profile.is_archive and story['story_date'] < user.profile.unread_cutoff: - story['read_status'] = 1 - elif (read_filter == 'all' or query) and usersub: - story['read_status'] = 1 if story['story_hash'] not in unread_story_hashes else 0 - elif read_filter == 'unread' and usersub: - story['read_status'] = 0 - if story['story_hash'] in starred_stories: - story['starred'] = True - starred_story = Feed.format_story(starred_stories[story['story_hash']]) - starred_date = localtime_for_timezone(starred_story['starred_date'], - user.profile.timezone) - story['starred_date'] = format_story_link_date__long(starred_date, now) - story['starred_timestamp'] = int(starred_date.timestamp()) - story['user_tags'] = starred_story['user_tags'] - story['user_notes'] = starred_story['user_notes'] - story['highlights'] = starred_story['highlights'] - if story['story_hash'] in shared_stories: - story['shared'] = True - shared_date = localtime_for_timezone(shared_stories[story['story_hash']]['shared_date'], - user.profile.timezone) - story['shared_date'] = format_story_link_date__long(shared_date, now) - story['shared_comments'] = strip_tags(shared_stories[story['story_hash']]['comments']) + story["read_status"] = 1 + if not user.profile.is_archive and story["story_date"] < user.profile.unread_cutoff: + story["read_status"] = 1 + elif (read_filter == "all" or query) and usersub: + story["read_status"] = 1 if story["story_hash"] not in unread_story_hashes else 0 + elif read_filter == "unread" and usersub: + story["read_status"] = 0 + if story["story_hash"] in starred_stories: + story["starred"] = True + starred_story = Feed.format_story(starred_stories[story["story_hash"]]) + starred_date = localtime_for_timezone(starred_story["starred_date"], user.profile.timezone) + story["starred_date"] = format_story_link_date__long(starred_date, now) + story["starred_timestamp"] = int(starred_date.timestamp()) + story["user_tags"] = starred_story["user_tags"] + story["user_notes"] = starred_story["user_notes"] + story["highlights"] = starred_story["highlights"] + if story["story_hash"] in shared_stories: + story["shared"] = True + shared_date = 
localtime_for_timezone( + shared_stories[story["story_hash"]]["shared_date"], user.profile.timezone + ) + story["shared_date"] = format_story_link_date__long(shared_date, now) + story["shared_comments"] = strip_tags(shared_stories[story["story_hash"]]["comments"]) else: - story['read_status'] = 1 - story['intelligence'] = { - 'feed': apply_classifier_feeds(classifier_feeds, feed), - 'author': apply_classifier_authors(classifier_authors, story), - 'tags': apply_classifier_tags(classifier_tags, story), - 'title': apply_classifier_titles(classifier_titles, story), + story["read_status"] = 1 + story["intelligence"] = { + "feed": apply_classifier_feeds(classifier_feeds, feed), + "author": apply_classifier_authors(classifier_authors, story), + "tags": apply_classifier_tags(classifier_tags, story), + "title": apply_classifier_titles(classifier_titles, story), } - story['score'] = UserSubscription.score_story(story['intelligence']) - + story["score"] = UserSubscription.score_story(story["intelligence"]) + # Intelligence feed_tags = json.decode(feed.data.popular_tags) if feed.data.popular_tags else [] feed_authors = json.decode(feed.data.popular_authors) if feed.data.popular_authors else [] - + if include_feeds: - feeds = Feed.objects.filter(pk__in=set([story['story_feed_id'] for story in stories])) + feeds = Feed.objects.filter(pk__in=set([story["story_feed_id"] for story in stories])) feeds = [f.canonical(include_favicon=False) for f in feeds] - + if usersub: usersub.feed_opens += 1 usersub.needs_unread_recalc = True try: - usersub.save(update_fields=['feed_opens', 'needs_unread_recalc']) + usersub.save(update_fields=["feed_opens", "needs_unread_recalc"]) except DatabaseError as e: logging.user(request, f"~BR~FK~SBNo changes in usersub, ignoring... {e}") - - diff1 = checkpoint1-start - diff2 = checkpoint2-start - diff3 = checkpoint3-start - diff4 = checkpoint4-start - timediff = time.time()-start + + diff1 = checkpoint1 - start + diff2 = checkpoint2 - start + diff3 = checkpoint3 - start + diff4 = checkpoint4 - start + timediff = time.time() - start last_update = relative_timesince(feed.last_update) time_breakdown = "" if timediff > 1 or settings.DEBUG: - time_breakdown = "~SN~FR(~SB%.4s/%.4s/%.4s/%.4s~SN)" % ( - diff1, diff2, diff3, diff4) - + time_breakdown = "~SN~FR(~SB%.4s/%.4s/%.4s/%.4s~SN)" % (diff1, diff2, diff3, diff4) + search_log = "~SN~FG(~SB%s~SN) " % query if query else "" - logging.user(request, "~FYLoading feed: ~SB%s%s (%s/%s) %s%s" % ( - feed.feed_title[:22], ('~SN/p%s' % page) if page > 1 else '', order, read_filter, search_log, time_breakdown)) - + logging.user( + request, + "~FYLoading feed: ~SB%s%s (%s/%s) %s%s" + % ( + feed.feed_title[:22], + ("~SN/p%s" % page) if page > 1 else "", + order, + read_filter, + search_log, + time_breakdown, + ), + ) + MAnalyticsLoader.add(page_load=timediff) - if hasattr(request, 'start_time'): + if hasattr(request, "start_time"): seconds = time.time() - request.start_time - RStats.add('page_load', duration=seconds) + RStats.add("page_load", duration=seconds) if not include_hidden: hidden_stories_removed = 0 new_stories = [] for story in stories: - if story['score'] >= 0: + if story["score"] >= 0: new_stories.append(story) else: hidden_stories_removed += 1 stories = new_stories - - data = dict(stories=stories, - user_profiles=user_profiles, - feed_tags=feed_tags, - feed_authors=feed_authors, - classifiers=classifiers, - updated=last_update, - user_search=user_search, - feed_id=feed.pk, - elapsed_time=round(float(timediff), 2), - message=message) 
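The per-story loop above never queries Mongo from inside the loop body: the starred and shared lookups are materialized once as dicts keyed on story_hash, so annotating each story costs two O(1) membership tests rather than two queries. A minimal sketch of that pattern, using the hypothetical names starred_by_hash and shared_by_hash in place of the rebound locals above:

    # Build hash-keyed indexes once, outside the loop.
    starred_by_hash = {s.story_hash: s for s in starred_stories}
    shared_by_hash = {
        s.story_hash: dict(shared_date=s.shared_date, comments=s.comments)
        for s in shared_stories
    }

    for story in stories:
        # Constant-time lookups stand in for one Mongo query per story.
        if story["story_hash"] in starred_by_hash:
            story["starred"] = True
        if story["story_hash"] in shared_by_hash:
            story["shared"] = True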
- - if include_feeds: data['feeds'] = feeds - if not include_hidden: data['hidden_stories_removed'] = hidden_stories_removed - if dupe_feed_id: data['dupe_feed_id'] = dupe_feed_id + + data = dict( + stories=stories, + user_profiles=user_profiles, + feed_tags=feed_tags, + feed_authors=feed_authors, + classifiers=classifiers, + updated=last_update, + user_search=user_search, + feed_id=feed.pk, + elapsed_time=round(float(timediff), 2), + message=message, + ) + + if include_feeds: + data["feeds"] = feeds + if not include_hidden: + data["hidden_stories_removed"] = hidden_stories_removed + if dupe_feed_id: + data["dupe_feed_id"] = dupe_feed_id if not usersub: data.update(feed.canonical()) # if not usersub and feed.num_subscribers <= 1: # data = dict(code=-1, message="You must be subscribed to this feed.") - + # time.sleep(random.randint(1, 3)) if delay and user.is_staff: # time.sleep(random.randint(2, 7) / 10.0) @@ -917,13 +975,14 @@ def load_single_feed(request, feed_id): return data + def load_feed_page(request, feed_id): if not feed_id: raise Http404 - + feed = Feed.get_by_id(feed_id) if feed and feed.has_page and not feed.has_page_exception: - if settings.BACKED_BY_AWS.get('pages_on_node'): + if settings.BACKED_BY_AWS.get("pages_on_node"): domain = Site.objects.get_current().domain url = "https://%s/original_page/%s" % ( domain, @@ -936,180 +995,193 @@ def load_feed_page(request, feed_id): page_response = None if page_response and page_response.status_code == 200: response = HttpResponse(page_response.content, content_type="text/html; charset=utf-8") - response['Content-Encoding'] = 'deflate' - response['Last-Modified'] = page_response.headers.get('Last-modified') - response['Etag'] = page_response.headers.get('Etag') - response['Content-Length'] = str(len(page_response.content)) - logging.user(request, "~FYLoading original page (%s), proxied from node: ~SB%s bytes" % - (feed_id, len(page_response.content))) + response["Content-Encoding"] = "deflate" + response["Last-Modified"] = page_response.headers.get("Last-modified") + response["Etag"] = page_response.headers.get("Etag") + response["Content-Length"] = str(len(page_response.content)) + logging.user( + request, + "~FYLoading original page (%s), proxied from node: ~SB%s bytes" + % (feed_id, len(page_response.content)), + ) return response - - if settings.BACKED_BY_AWS['pages_on_s3'] and feed.s3_page: + + if settings.BACKED_BY_AWS["pages_on_s3"] and feed.s3_page: if settings.PROXY_S3_PAGES: key = settings.S3_CONN.Bucket(settings.S3_PAGES_BUCKET_NAME).Object(key=feed.s3_pages_key) if key: compressed_data = key.get()["Body"] response = HttpResponse(compressed_data, content_type="text/html; charset=utf-8") - response['Content-Encoding'] = 'gzip' - - logging.user(request, "~FYLoading original page, proxied: ~SB%s bytes" % - (len(compressed_data))) + response["Content-Encoding"] = "gzip" + + logging.user( + request, "~FYLoading original page, proxied: ~SB%s bytes" % (len(compressed_data)) + ) return response else: logging.user(request, "~FYLoading original page, non-proxied") - return HttpResponseRedirect('//%s/%s' % (settings.S3_PAGES_BUCKET_NAME, - feed.s3_pages_key)) - + return HttpResponseRedirect("//%s/%s" % (settings.S3_PAGES_BUCKET_NAME, feed.s3_pages_key)) + data = MFeedPage.get_data(feed_id=feed_id) - + if not data or not feed or not feed.has_page or feed.has_page_exception: logging.user(request, "~FYLoading original page, ~FRmissing") - return render(request, 'static/404_original_page.xhtml', {}, - content_type='text/html', - 
status=404) - + return render(request, "static/404_original_page.xhtml", {}, content_type="text/html", status=404) + logging.user(request, "~FYLoading original page, from the db") return HttpResponse(data, content_type="text/html; charset=utf-8") + @json.json_view def load_starred_stories(request): - user = get_user(request) - offset = int(request.GET.get('offset', 0)) - limit = int(request.GET.get('limit', 10)) - page = int(request.GET.get('page', 0)) - query = request.GET.get('query', '').strip() - order = request.GET.get('order', 'newest') - tag = request.GET.get('tag') - highlights = is_true(request.GET.get('highlights', False)) - story_hashes = request.GET.getlist('h') or request.GET.getlist('h[]') + user = get_user(request) + offset = int(request.GET.get("offset", 0)) + limit = int(request.GET.get("limit", 10)) + page = int(request.GET.get("page", 0)) + query = request.GET.get("query", "").strip() + order = request.GET.get("order", "newest") + tag = request.GET.get("tag") + highlights = is_true(request.GET.get("highlights", False)) + story_hashes = request.GET.getlist("h") or request.GET.getlist("h[]") story_hashes = story_hashes[:100] - version = int(request.GET.get('v', 1)) - now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone) - message = None - order_by = '-' if order == "newest" else "" - if page: offset = limit * (page - 1) - + version = int(request.GET.get("v", 1)) + now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone) + message = None + order_by = "-" if order == "newest" else "" + if page: + offset = limit * (page - 1) + if query: - # results = SearchStarredStory.query(user.pk, query) - # story_ids = [result.db_id for result in results] + # results = SearchStarredStory.query(user.pk, query) + # story_ids = [result.db_id for result in results] if user.profile.is_premium: - stories = MStarredStory.find_stories(query, user.pk, tag=tag, offset=offset, limit=limit, - order=order) + stories = MStarredStory.find_stories( + query, user.pk, tag=tag, offset=offset, limit=limit, order=order + ) else: stories = [] message = "You must be a premium subscriber to search." elif highlights: if user.profile.is_premium: mstories = MStarredStory.objects( - user_id=user.pk, - highlights__exists=True, - __raw__={"$where": "this.highlights.length > 0"} - ).order_by('%sstarred_date' % order_by)[offset:offset+limit] - stories = Feed.format_stories(mstories) + user_id=user.pk, highlights__exists=True, __raw__={"$where": "this.highlights.length > 0"} + ).order_by("%sstarred_date" % order_by)[offset : offset + limit] + stories = Feed.format_stories(mstories) else: stories = [] message = "You must be a premium subscriber to read through saved story highlights." elif tag: if user.profile.is_premium: - mstories = MStarredStory.objects( - user_id=user.pk, - user_tags__contains=tag - ).order_by('%sstarred_date' % order_by)[offset:offset+limit] - stories = Feed.format_stories(mstories) + mstories = MStarredStory.objects(user_id=user.pk, user_tags__contains=tag).order_by( + "%sstarred_date" % order_by + )[offset : offset + limit] + stories = Feed.format_stories(mstories) else: stories = [] message = "You must be a premium subscriber to read saved stories by tag." 
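Every premium branch of load_starred_stories repeats one pagination idiom: order the MStarredStory queryset by starred_date (descending when order is "newest") and slice it, which MongoEngine translates lazily into skip/limit on the cursor so only limit documents are fetched. A hedged sketch of that shared idiom, with page_starred as a hypothetical helper name that does not appear in the code above:

    order_by = "-" if order == "newest" else ""

    def page_starred(qs):
        # The slice is lazy: it becomes skip(offset) / limit(limit) on the cursor.
        return qs.order_by("%sstarred_date" % order_by)[offset : offset + limit]

    # e.g. the tag branch above is equivalent to:
    # stories = Feed.format_stories(
    #     page_starred(MStarredStory.objects(user_id=user.pk, user_tags__contains=tag))
    # )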
elif story_hashes: limit = 100 - mstories = MStarredStory.objects( - user_id=user.pk, - story_hash__in=story_hashes - ).order_by('%sstarred_date' % order_by)[offset:offset+limit] + mstories = MStarredStory.objects(user_id=user.pk, story_hash__in=story_hashes).order_by( + "%sstarred_date" % order_by + )[offset : offset + limit] stories = Feed.format_stories(mstories) else: - mstories = MStarredStory.objects( - user_id=user.pk - ).order_by('%sstarred_date' % order_by)[offset:offset+limit] + mstories = MStarredStory.objects(user_id=user.pk).order_by("%sstarred_date" % order_by)[ + offset : offset + limit + ] stories = Feed.format_stories(mstories) - + stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk, check_all=True) - - story_hashes = [story['story_hash'] for story in stories] - story_feed_ids = list(set(s['story_feed_id'] for s in stories)) - usersub_ids = UserSubscription.objects.filter(user__pk=user.pk, feed__pk__in=story_feed_ids).values('feed__pk') - usersub_ids = [us['feed__pk'] for us in usersub_ids] + + story_hashes = [story["story_hash"] for story in stories] + story_feed_ids = list(set(s["story_feed_id"] for s in stories)) + usersub_ids = UserSubscription.objects.filter(user__pk=user.pk, feed__pk__in=story_feed_ids).values( + "feed__pk" + ) + usersub_ids = [us["feed__pk"] for us in usersub_ids] unsub_feed_ids = list(set(story_feed_ids).difference(set(usersub_ids))) - unsub_feeds = Feed.objects.filter(pk__in=unsub_feed_ids) - unsub_feeds = dict((feed.pk, feed.canonical(include_favicon=False)) for feed in unsub_feeds) + unsub_feeds = Feed.objects.filter(pk__in=unsub_feed_ids) + unsub_feeds = dict((feed.pk, feed.canonical(include_favicon=False)) for feed in unsub_feeds) for story in stories: - if story['story_feed_id'] in unsub_feeds: continue - duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=story['story_feed_id']) - if not duplicate_feed: continue + if story["story_feed_id"] in unsub_feeds: + continue + duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=story["story_feed_id"]) + if not duplicate_feed: + continue feed_id = duplicate_feed[0].feed_id try: - saved_story = MStarredStory.objects.get(user_id=user.pk, story_hash=story['story_hash']) + saved_story = MStarredStory.objects.get(user_id=user.pk, story_hash=story["story_hash"]) saved_story.feed_id = feed_id - _, story_hash = MStory.split_story_hash(story['story_hash']) + _, story_hash = MStory.split_story_hash(story["story_hash"]) saved_story.story_hash = "%s:%s" % (feed_id, story_hash) saved_story.story_feed_id = feed_id - story['story_hash'] = saved_story.story_hash - story['story_feed_id'] = saved_story.story_feed_id + story["story_hash"] = saved_story.story_hash + story["story_feed_id"] = saved_story.story_feed_id saved_story.save() - logging.user(request, "~FCSaving new feed for starred story: ~SB%s -> %s" % (story['story_hash'], feed_id)) + logging.user( + request, "~FCSaving new feed for starred story: ~SB%s -> %s" % (story["story_hash"], feed_id) + ) except (MStarredStory.DoesNotExist, MStarredStory.MultipleObjectsReturned): - logging.user(request, "~FCCan't find feed for starred story: ~SB%s" % (story['story_hash'])) + logging.user(request, "~FCCan't find feed for starred story: ~SB%s" % (story["story_hash"])) continue - + shared_story_hashes = MSharedStory.check_shared_story_hashes(user.pk, story_hashes) shared_stories = [] if shared_story_hashes: - shared_stories = MSharedStory.objects(user_id=user.pk, - story_hash__in=shared_story_hashes)\ - 
.hint([('story_hash', 1)])\ - .only('story_hash', 'shared_date', 'comments') - shared_stories = dict([(story.story_hash, dict(shared_date=story.shared_date, - comments=story.comments)) - for story in shared_stories]) + shared_stories = ( + MSharedStory.objects(user_id=user.pk, story_hash__in=shared_story_hashes) + .hint([("story_hash", 1)]) + .only("story_hash", "shared_date", "comments") + ) + shared_stories = dict( + [ + (story.story_hash, dict(shared_date=story.shared_date, comments=story.comments)) + for story in shared_stories + ] + ) nowtz = localtime_for_timezone(now, user.profile.timezone) for story in stories: - story_date = localtime_for_timezone(story['story_date'], user.profile.timezone) - story['short_parsed_date'] = format_story_link_date__short(story_date, nowtz) - story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz) - starred_date = localtime_for_timezone(story['starred_date'], user.profile.timezone) - story['starred_date'] = format_story_link_date__long(starred_date, nowtz) - story['starred_timestamp'] = int(starred_date.timestamp()) - story['read_status'] = 1 - story['starred'] = True - story['intelligence'] = { - 'feed': 1, - 'author': 0, - 'tags': 0, - 'title': 0, + story_date = localtime_for_timezone(story["story_date"], user.profile.timezone) + story["short_parsed_date"] = format_story_link_date__short(story_date, nowtz) + story["long_parsed_date"] = format_story_link_date__long(story_date, nowtz) + starred_date = localtime_for_timezone(story["starred_date"], user.profile.timezone) + story["starred_date"] = format_story_link_date__long(starred_date, nowtz) + story["starred_timestamp"] = int(starred_date.timestamp()) + story["read_status"] = 1 + story["starred"] = True + story["intelligence"] = { + "feed": 1, + "author": 0, + "tags": 0, + "title": 0, } - if story['story_hash'] in shared_stories: - story['shared'] = True - story['shared_comments'] = strip_tags(shared_stories[story['story_hash']]['comments']) - + if story["story_hash"] in shared_stories: + story["shared"] = True + story["shared_comments"] = strip_tags(shared_stories[story["story_hash"]]["comments"]) + search_log = "~SN~FG(~SB%s~SN)" % query if query else "" logging.user(request, "~FCLoading starred stories: ~SB%s stories %s" % (len(stories), search_log)) - + return { "stories": stories, "user_profiles": user_profiles, - 'feeds': list(unsub_feeds.values()) if version == 2 else unsub_feeds, + "feeds": list(unsub_feeds.values()) if version == 2 else unsub_feeds, "message": message, } + @json.json_view def starred_story_hashes(request): - user = get_user(request) - include_timestamps = is_true(request.GET.get('include_timestamps', False)) - - mstories = MStarredStory.objects( - user_id=user.pk - ).only('story_hash', 'starred_date', 'starred_updated').order_by('-starred_date') - + user = get_user(request) + include_timestamps = is_true(request.GET.get("include_timestamps", False)) + + mstories = ( + MStarredStory.objects(user_id=user.pk) + .only("story_hash", "starred_date", "starred_updated") + .order_by("-starred_date") + ) + if include_timestamps: story_hashes = [] for s in mstories: @@ -1119,21 +1191,22 @@ def starred_story_hashes(request): story_hashes.append((s.story_hash, date.strftime("%s"))) else: story_hashes = [s.story_hash for s in mstories] - - logging.user(request, "~FYLoading ~FCstarred story hashes~FY: %s story hashes" % - (len(story_hashes))) + + logging.user(request, "~FYLoading ~FCstarred story hashes~FY: %s story hashes" % (len(story_hashes))) return 
dict(starred_story_hashes=story_hashes) + def starred_stories_rss_feed(request, user_id, secret_token): return starred_stories_rss_feed_tag(request, user_id, secret_token, tag_slug=None) + def starred_stories_rss_feed_tag(request, user_id, secret_token, tag_slug): try: user = User.objects.get(pk=user_id) except User.DoesNotExist: raise Http404 - + if tag_slug: try: tag_counts = MStarredStoryCounts.objects.get(user_id=user_id, slug=tag_slug) @@ -1143,160 +1216,181 @@ def starred_stories_rss_feed_tag(request, user_id, secret_token, tag_slug): raise Http404 else: _, starred_count = MStarredStoryCounts.user_counts(user.pk, include_total=True) - + data = {} if tag_slug: - data['title'] = "Saved Stories - %s" % tag_counts.tag + data["title"] = "Saved Stories - %s" % tag_counts.tag else: - data['title'] = "Saved Stories" - data['link'] = "%s%s" % ( + data["title"] = "Saved Stories" + data["link"] = "%s%s" % ( settings.NEWSBLUR_URL, - reverse('saved-stories-tag', kwargs=dict(tag_name=tag_slug))) + reverse("saved-stories-tag", kwargs=dict(tag_name=tag_slug)), + ) if tag_slug: - data['description'] = "Stories saved by %s on NewsBlur with the tag \"%s\"." % (user.username, - tag_counts.tag) + data["description"] = 'Stories saved by %s on NewsBlur with the tag "%s".' % ( + user.username, + tag_counts.tag, + ) else: - data['description'] = "Stories saved by %s on NewsBlur." % (user.username) - data['lastBuildDate'] = datetime.datetime.utcnow() - data['generator'] = 'NewsBlur - %s' % settings.NEWSBLUR_URL - data['docs'] = None - data['author_name'] = user.username - data['feed_url'] = "%s%s" % ( + data["description"] = "Stories saved by %s on NewsBlur." % (user.username) + data["lastBuildDate"] = datetime.datetime.utcnow() + data["generator"] = "NewsBlur - %s" % settings.NEWSBLUR_URL + data["docs"] = None + data["author_name"] = user.username + data["feed_url"] = "%s%s" % ( settings.NEWSBLUR_URL, - reverse('starred-stories-rss-feed-tag', - kwargs=dict(user_id=user_id, secret_token=secret_token, tag_slug=tag_slug)), + reverse( + "starred-stories-rss-feed-tag", + kwargs=dict(user_id=user_id, secret_token=secret_token, tag_slug=tag_slug), + ), ) rss = feedgenerator.Atom1Feed(**data) if not tag_slug or not tag_counts.tag: - starred_stories = MStarredStory.objects( - user_id=user.pk - ).order_by('-starred_date').limit(25) + starred_stories = MStarredStory.objects(user_id=user.pk).order_by("-starred_date").limit(25) elif tag_counts.is_highlights: - starred_stories = MStarredStory.objects( - user_id=user.pk, - highlights__exists=True, - __raw__={"$where": "this.highlights.length > 0"} - ).order_by('-starred_date').limit(25) + starred_stories = ( + MStarredStory.objects( + user_id=user.pk, highlights__exists=True, __raw__={"$where": "this.highlights.length > 0"} + ) + .order_by("-starred_date") + .limit(25) + ) else: - starred_stories = MStarredStory.objects( - user_id=user.pk, - user_tags__contains=tag_counts.tag - ).order_by('-starred_date').limit(25) + starred_stories = ( + MStarredStory.objects(user_id=user.pk, user_tags__contains=tag_counts.tag) + .order_by("-starred_date") + .limit(25) + ) starred_stories = Feed.format_stories(starred_stories) for starred_story in starred_stories: story_data = { - 'title': smart_str(starred_story['story_title']), - 'link': starred_story['story_permalink'], - 'description': smart_str(starred_story['story_content']), - 'author_name': starred_story['story_authors'], - 'categories': starred_story['story_tags'], - 'unique_id': starred_story['story_permalink'], - 'pubdate': 
starred_story['starred_date'], + "title": smart_str(starred_story["story_title"]), + "link": starred_story["story_permalink"], + "description": smart_str(starred_story["story_content"]), + "author_name": starred_story["story_authors"], + "categories": starred_story["story_tags"], + "unique_id": starred_story["story_permalink"], + "pubdate": starred_story["starred_date"], } rss.add_item(**story_data) - - logging.user(request, "~FBGenerating ~SB%s~SN's saved story RSS feed (%s, %s stories): ~FM%s" % ( - user.username, - tag_counts.tag if tag_slug else "[All stories]", - tag_counts.count if tag_slug else starred_count, - request.META.get('HTTP_USER_AGENT', "")[:24] - )) - return HttpResponse(rss.writeString('utf-8'), content_type='application/rss+xml') + + logging.user( + request, + "~FBGenerating ~SB%s~SN's saved story RSS feed (%s, %s stories): ~FM%s" + % ( + user.username, + tag_counts.tag if tag_slug else "[All stories]", + tag_counts.count if tag_slug else starred_count, + request.META.get("HTTP_USER_AGENT", "")[:24], + ), + ) + return HttpResponse(rss.writeString("utf-8"), content_type="application/rss+xml") + def folder_rss_feed(request, user_id, secret_token, unread_filter, folder_slug): domain = Site.objects.get_current().domain - date_hack_2023 = (datetime.datetime.now() > datetime.datetime(2023, 7, 1)) + date_hack_2023 = datetime.datetime.now() > datetime.datetime(2023, 7, 1) try: user = User.objects.get(pk=user_id) except User.DoesNotExist: raise Http404 - + user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=user) feed_ids, folder_title = user_sub_folders.feed_ids_under_folder_slug(folder_slug) - + usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids) if feed_ids and ((user.profile.is_archive and date_hack_2023) or (not date_hack_2023)): params = { - "user_id": user.pk, + "user_id": user.pk, "feed_ids": feed_ids, "offset": 0, "limit": 20, - "order": 'newest', - "read_filter": 'all', - "cache_prefix": "RSS:" + "order": "newest", + "read_filter": "all", + "cache_prefix": "RSS:", } story_hashes, unread_feed_story_hashes = UserSubscription.feed_stories(**params) else: story_hashes = [] - mstories = MStory.objects(story_hash__in=story_hashes).order_by('-story_date') + mstories = MStory.objects(story_hash__in=story_hashes).order_by("-story_date") stories = Feed.format_stories(mstories) - + filtered_stories = [] - found_feed_ids = list(set([story['story_feed_id'] for story in stories])) + found_feed_ids = list(set([story["story_feed_id"] for story in stories])) trained_feed_ids = [sub.feed_id for sub in usersubs if sub.is_trained] - found_trained_feed_ids = list(set(trained_feed_ids) & set(found_feed_ids)) + found_trained_feed_ids = list(set(trained_feed_ids) & set(found_feed_ids)) if found_trained_feed_ids: - classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, - feed_id__in=found_trained_feed_ids, - social_user_id=0)) - classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk, - feed_id__in=found_trained_feed_ids)) - classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, - feed_id__in=found_trained_feed_ids)) - classifier_tags = list(MClassifierTag.objects(user_id=user.pk, - feed_id__in=found_trained_feed_ids)) + classifier_feeds = list( + MClassifierFeed.objects(user_id=user.pk, feed_id__in=found_trained_feed_ids, social_user_id=0) + ) + classifier_authors = list( + MClassifierAuthor.objects(user_id=user.pk, feed_id__in=found_trained_feed_ids) + ) + classifier_titles = list( + MClassifierTitle.objects(user_id=user.pk, 
feed_id__in=found_trained_feed_ids) + ) + classifier_tags = list(MClassifierTag.objects(user_id=user.pk, feed_id__in=found_trained_feed_ids)) else: classifier_feeds = [] classifier_authors = [] classifier_titles = [] classifier_tags = [] - - sort_classifiers_by_feed(user=user, feed_ids=found_feed_ids, - classifier_feeds=classifier_feeds, - classifier_authors=classifier_authors, - classifier_titles=classifier_titles, - classifier_tags=classifier_tags) + + sort_classifiers_by_feed( + user=user, + feed_ids=found_feed_ids, + classifier_feeds=classifier_feeds, + classifier_authors=classifier_authors, + classifier_titles=classifier_titles, + classifier_tags=classifier_tags, + ) for story in stories: - story['intelligence'] = { - 'feed': apply_classifier_feeds(classifier_feeds, story['story_feed_id']), - 'author': apply_classifier_authors(classifier_authors, story), - 'tags': apply_classifier_tags(classifier_tags, story), - 'title': apply_classifier_titles(classifier_titles, story), + story["intelligence"] = { + "feed": apply_classifier_feeds(classifier_feeds, story["story_feed_id"]), + "author": apply_classifier_authors(classifier_authors, story), + "tags": apply_classifier_tags(classifier_tags, story), + "title": apply_classifier_titles(classifier_titles, story), } - story['score'] = UserSubscription.score_story(story['intelligence']) - if unread_filter == 'focus' and story['score'] >= 1: + story["score"] = UserSubscription.score_story(story["intelligence"]) + if unread_filter == "focus" and story["score"] >= 1: filtered_stories.append(story) - elif unread_filter == 'unread' and story['score'] >= 0: + elif unread_filter == "unread" and story["score"] >= 0: filtered_stories.append(story) stories = filtered_stories - + data = {} - data['title'] = "%s from %s (%s sites)" % (folder_title, user.username, len(feed_ids)) - data['link'] = "https://%s%s" % ( - domain, - reverse('folder', kwargs=dict(folder_name=folder_title))) - data['description'] = "Unread stories in %s on NewsBlur. From %s's account and contains %s sites." % ( + data["title"] = "%s from %s (%s sites)" % (folder_title, user.username, len(feed_ids)) + data["link"] = "https://%s%s" % (domain, reverse("folder", kwargs=dict(folder_name=folder_title))) + data["description"] = "Unread stories in %s on NewsBlur. From %s's account and contains %s sites." 
% ( folder_title, user.username, - len(feed_ids)) - data['lastBuildDate'] = datetime.datetime.utcnow() - data['generator'] = 'NewsBlur - %s' % settings.NEWSBLUR_URL - data['docs'] = None - data['author_name'] = user.username - data['feed_url'] = "https://%s%s" % ( + len(feed_ids), + ) + data["lastBuildDate"] = datetime.datetime.utcnow() + data["generator"] = "NewsBlur - %s" % settings.NEWSBLUR_URL + data["docs"] = None + data["author_name"] = user.username + data["feed_url"] = "https://%s%s" % ( domain, - reverse('folder-rss-feed', - kwargs=dict(user_id=user_id, secret_token=secret_token, unread_filter=unread_filter, folder_slug=folder_slug)), + reverse( + "folder-rss-feed", + kwargs=dict( + user_id=user_id, + secret_token=secret_token, + unread_filter=unread_filter, + folder_slug=folder_slug, + ), + ), ) rss = feedgenerator.Atom1Feed(**data) for story in stories: - feed = Feed.get_by_id(story['story_feed_id']) + feed = Feed.get_by_id(story["story_feed_id"]) feed_title = feed.feed_title if feed else "" try: usersub = UserSubscription.objects.get(user=user, feed=feed) @@ -1304,58 +1398,59 @@ def folder_rss_feed(request, user_id, secret_token, unread_filter, folder_slug): feed_title = usersub.user_title except UserSubscription.DoesNotExist: usersub = None - + story_content = """%s

%s""" % ( - smart_str(story['story_content']), + smart_str(story["story_content"]), Site.objects.get_current().domain, - story['story_feed_id'], + story["story_feed_id"], feed_title, ) - story_content = re.sub(r'[\x00-\x08\x0B-\x0C\x0E-\x1F]', '', story_content) - story_title = "%s%s" % (("%s: " % feed_title) if feed_title else "", story['story_title']) + story_content = re.sub(r"[\x00-\x08\x0B-\x0C\x0E-\x1F]", "", story_content) + story_title = "%s%s" % (("%s: " % feed_title) if feed_title else "", story["story_title"]) story_data = { - 'title': story_title, - 'link': story['story_permalink'], - 'description': story_content, - 'categories': story['story_tags'], - 'unique_id': 'https://%s/site/%s/%s/' % (domain, story['story_feed_id'], story['guid_hash']), - 'pubdate': localtime_for_timezone(story['story_date'], user.profile.timezone), + "title": story_title, + "link": story["story_permalink"], + "description": story_content, + "categories": story["story_tags"], + "unique_id": "https://%s/site/%s/%s/" % (domain, story["story_feed_id"], story["guid_hash"]), + "pubdate": localtime_for_timezone(story["story_date"], user.profile.timezone), } - if story['story_authors']: - story_data['author_name'] = story['story_authors'] + if story["story_authors"]: + story_data["author_name"] = story["story_authors"] rss.add_item(**story_data) # TODO: Remove below date hack to accomodate users who paid for premium but want folder rss if not user.profile.is_archive and date_hack_2023: story_data = { - 'title': "You must have a premium archive subscription on NewsBlur to have RSS feeds for folders.", - 'link': "https://%s/?next=premium" % domain, - 'description': "You must have a premium archive subscription on NewsBlur to have RSS feeds for folders.", - 'unique_id': "https://%s/premium_only" % domain, - 'pubdate': localtime_for_timezone(datetime.datetime.now(), user.profile.timezone), + "title": "You must have a premium archive subscription on NewsBlur to have RSS feeds for folders.", + "link": "https://%s/?next=premium" % domain, + "description": "You must have a premium archive subscription on NewsBlur to have RSS feeds for folders.", + "unique_id": "https://%s/premium_only" % domain, + "pubdate": localtime_for_timezone(datetime.datetime.now(), user.profile.timezone), } rss.add_item(**story_data) - - logging.user(request, "~FBGenerating ~SB%s~SN's folder RSS feed (%s, %s stories): ~FM%s" % ( - user.username, - folder_title, - len(stories), - request.META.get('HTTP_USER_AGENT', "")[:24] - )) - return HttpResponse(rss.writeString('utf-8'), content_type='application/rss+xml') + + logging.user( + request, + "~FBGenerating ~SB%s~SN's folder RSS feed (%s, %s stories): ~FM%s" + % (user.username, folder_title, len(stories), request.META.get("HTTP_USER_AGENT", "")[:24]), + ) + return HttpResponse(rss.writeString("utf-8"), content_type="application/rss+xml") + @json.json_view def load_read_stories(request): - user = get_user(request) - offset = int(request.GET.get('offset', 0)) - limit = int(request.GET.get('limit', 10)) - page = int(request.GET.get('page', 0)) - order = request.GET.get('order', 'newest') - query = request.GET.get('query', '').strip() - now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone) + user = get_user(request) + offset = int(request.GET.get("offset", 0)) + limit = int(request.GET.get("limit", 10)) + page = int(request.GET.get("page", 0)) + order = request.GET.get("order", "newest") + query = request.GET.get("query", "").strip() + now = 
localtime_for_timezone(datetime.datetime.now(), user.profile.timezone) message = None - if page: offset = limit * (page - 1) - + if page: + offset = limit * (page - 1) + if query: stories = [] message = "Not implemented yet." @@ -1368,58 +1463,65 @@ def load_read_stories(request): story_hashes = RUserStory.get_read_stories(user.pk, offset=offset, limit=limit, order=order) mstories = MStory.objects(story_hash__in=story_hashes) stories = Feed.format_stories(mstories) - stories = sorted(stories, key=lambda story: story_hashes.index(story['story_hash']), - reverse=bool(order=="oldest")) - + stories = sorted( + stories, + key=lambda story: story_hashes.index(story["story_hash"]), + reverse=bool(order == "oldest"), + ) + stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk, check_all=True) - - story_hashes = [story['story_hash'] for story in stories] - story_feed_ids = list(set(s['story_feed_id'] for s in stories)) - usersub_ids = UserSubscription.objects.filter(user__pk=user.pk, feed__pk__in=story_feed_ids).values('feed__pk') - usersub_ids = [us['feed__pk'] for us in usersub_ids] + + story_hashes = [story["story_hash"] for story in stories] + story_feed_ids = list(set(s["story_feed_id"] for s in stories)) + usersub_ids = UserSubscription.objects.filter(user__pk=user.pk, feed__pk__in=story_feed_ids).values( + "feed__pk" + ) + usersub_ids = [us["feed__pk"] for us in usersub_ids] unsub_feed_ids = list(set(story_feed_ids).difference(set(usersub_ids))) - unsub_feeds = Feed.objects.filter(pk__in=unsub_feed_ids) - unsub_feeds = [feed.canonical(include_favicon=False) for feed in unsub_feeds] - - shared_stories = MSharedStory.objects(user_id=user.pk, - story_hash__in=story_hashes)\ - .hint([('story_hash', 1)])\ - .only('story_hash', 'shared_date', 'comments') - shared_stories = dict([(story.story_hash, dict(shared_date=story.shared_date, - comments=story.comments)) - for story in shared_stories]) - starred_stories = MStarredStory.objects(user_id=user.pk, - story_hash__in=story_hashes)\ - .hint([('user_id', 1), ('story_hash', 1)]) - starred_stories = dict([(story.story_hash, story) - for story in starred_stories]) - + unsub_feeds = Feed.objects.filter(pk__in=unsub_feed_ids) + unsub_feeds = [feed.canonical(include_favicon=False) for feed in unsub_feeds] + + shared_stories = ( + MSharedStory.objects(user_id=user.pk, story_hash__in=story_hashes) + .hint([("story_hash", 1)]) + .only("story_hash", "shared_date", "comments") + ) + shared_stories = dict( + [ + (story.story_hash, dict(shared_date=story.shared_date, comments=story.comments)) + for story in shared_stories + ] + ) + starred_stories = MStarredStory.objects(user_id=user.pk, story_hash__in=story_hashes).hint( + [("user_id", 1), ("story_hash", 1)] + ) + starred_stories = dict([(story.story_hash, story) for story in starred_stories]) + nowtz = localtime_for_timezone(now, user.profile.timezone) for story in stories: - story_date = localtime_for_timezone(story['story_date'], user.profile.timezone) - story['short_parsed_date'] = format_story_link_date__short(story_date, nowtz) - story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz) - story['read_status'] = 1 - story['intelligence'] = { - 'feed': 1, - 'author': 0, - 'tags': 0, - 'title': 0, + story_date = localtime_for_timezone(story["story_date"], user.profile.timezone) + story["short_parsed_date"] = format_story_link_date__short(story_date, nowtz) + story["long_parsed_date"] = format_story_link_date__long(story_date, nowtz) + story["read_status"] 
= 1 + story["intelligence"] = { + "feed": 1, + "author": 0, + "tags": 0, + "title": 0, } - if story['story_hash'] in starred_stories: - story['starred'] = True - starred_story = Feed.format_story(starred_stories[story['story_hash']]) - starred_date = localtime_for_timezone(starred_story['starred_date'], - user.profile.timezone) - story['starred_date'] = format_story_link_date__long(starred_date, now) - story['starred_timestamp'] = int(starred_date.timestamp()) - if story['story_hash'] in shared_stories: - story['shared'] = True - story['shared_comments'] = strip_tags(shared_stories[story['story_hash']]['comments']) - + if story["story_hash"] in starred_stories: + story["starred"] = True + starred_story = Feed.format_story(starred_stories[story["story_hash"]]) + starred_date = localtime_for_timezone(starred_story["starred_date"], user.profile.timezone) + story["starred_date"] = format_story_link_date__long(starred_date, now) + story["starred_timestamp"] = int(starred_date.timestamp()) + if story["story_hash"] in shared_stories: + story["shared"] = True + story["shared_comments"] = strip_tags(shared_stories[story["story_hash"]]["comments"]) + search_log = "~SN~FG(~SB%s~SN)" % query if query else "" logging.user(request, "~FCLoading read stories: ~SB%s stories %s" % (len(stories), search_log)) - + return { "stories": stories, "user_profiles": user_profiles, @@ -1427,41 +1529,42 @@ def load_read_stories(request): "message": message, } + @json.json_view def load_river_stories__redis(request): # get_post is request.REQUEST, since this endpoint needs to handle either # GET or POST requests, since the parameters for this endpoint can be # very long, at which point the max size of a GET url request is exceeded. - get_post = getattr(request, request.method) - limit = int(get_post.get('limit', 12)) - start = time.time() - user = get_user(request) - message = None - feed_ids = get_post.getlist('feeds') or get_post.getlist('feeds[]') - feed_ids = [int(feed_id) for feed_id in feed_ids if feed_id] + get_post = getattr(request, request.method) + limit = int(get_post.get("limit", 12)) + start = time.time() + user = get_user(request) + message = None + feed_ids = get_post.getlist("feeds") or get_post.getlist("feeds[]") + feed_ids = [int(feed_id) for feed_id in feed_ids if feed_id] if not feed_ids: - feed_ids = get_post.getlist('f') or get_post.getlist('f[]') - feed_ids = [int(feed_id) for feed_id in get_post.getlist('f') if feed_id] - story_hashes = get_post.getlist('h') or get_post.getlist('h[]') - story_hashes = story_hashes[:100] - requested_hashes = len(story_hashes) + feed_ids = get_post.getlist("f") or get_post.getlist("f[]") + feed_ids = [int(feed_id) for feed_id in get_post.getlist("f") if feed_id] + story_hashes = get_post.getlist("h") or get_post.getlist("h[]") + story_hashes = story_hashes[:100] + requested_hashes = len(story_hashes) original_feed_ids = list(feed_ids) - page = int(get_post.get('page', 1)) - order = get_post.get('order', 'newest') - read_filter = get_post.get('read_filter', 'unread') - query = get_post.get('query', '').strip() - include_hidden = is_true(get_post.get('include_hidden', False)) - include_feeds = is_true(get_post.get('include_feeds', False)) - on_dashboard = is_true(get_post.get('dashboard', False)) or is_true(get_post.get('on_dashboard', False)) - infrequent = is_true(get_post.get('infrequent', False)) + page = int(get_post.get("page", 1)) + order = get_post.get("order", "newest") + read_filter = get_post.get("read_filter", "unread") + query = get_post.get("query", 
"").strip() + include_hidden = is_true(get_post.get("include_hidden", False)) + include_feeds = is_true(get_post.get("include_feeds", False)) + on_dashboard = is_true(get_post.get("dashboard", False)) or is_true(get_post.get("on_dashboard", False)) + infrequent = is_true(get_post.get("infrequent", False)) if infrequent: - infrequent = get_post.get('infrequent') - now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone) - usersubs = [] - code = 1 - user_search = None - offset = (page-1) * limit - story_date_order = "%sstory_date" % ('' if order == 'oldest' else '-') + infrequent = get_post.get("infrequent") + now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone) + usersubs = [] + code = 1 + user_search = None + offset = (page - 1) * limit + story_date_order = "%sstory_date" % ("" if order == "oldest" else "-") if user.pk == 86178: # Disable Michael_Novakhov account @@ -1470,46 +1573,47 @@ def load_river_stories__redis(request): if infrequent: feed_ids = Feed.low_volume_feeds(feed_ids, stories_per_month=infrequent) - + if story_hashes: unread_feed_story_hashes = None - read_filter = 'all' + read_filter = "all" mstories = MStory.objects(story_hash__in=story_hashes).order_by(story_date_order) stories = Feed.format_stories(mstories) elif query: if user.profile.is_premium: user_search = MUserSearch.get_user(user.pk) user_search.touch_search_date() - usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids, - read_filter='all') + usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids, read_filter="all") feed_ids = [sub.feed_id for sub in usersubs] if infrequent: feed_ids = Feed.low_volume_feeds(feed_ids, stories_per_month=infrequent) stories = Feed.find_feed_stories(feed_ids, query, order=order, offset=offset, limit=limit) mstories = stories - unread_feed_story_hashes = UserSubscription.story_hashes(user.pk, feed_ids=feed_ids, - read_filter="unread", order=order, - cutoff_date=user.profile.unread_cutoff) + unread_feed_story_hashes = UserSubscription.story_hashes( + user.pk, + feed_ids=feed_ids, + read_filter="unread", + order=order, + cutoff_date=user.profile.unread_cutoff, + ) else: stories = [] mstories = [] message = "You must be a premium subscriber to search." 
- elif read_filter == 'starred': - mstories = MStarredStory.objects( - user_id=user.pk, - story_feed_id__in=feed_ids - ).order_by('%sstarred_date' % ('-' if order == 'newest' else ''))[offset:offset+limit] - stories = Feed.format_stories(mstories) + elif read_filter == "starred": + mstories = MStarredStory.objects(user_id=user.pk, story_feed_id__in=feed_ids).order_by( + "%sstarred_date" % ("-" if order == "newest" else "") + )[offset : offset + limit] + stories = Feed.format_stories(mstories) else: - usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids, - read_filter=read_filter) + usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids, read_filter=read_filter) all_feed_ids = [f for f in feed_ids] feed_ids = [sub.feed_id for sub in usersubs] if infrequent: feed_ids = Feed.low_volume_feeds(feed_ids, stories_per_month=infrequent) if feed_ids: params = { - "user_id": user.pk, + "user_id": user.pk, "feed_ids": feed_ids, "all_feed_ids": all_feed_ids, "offset": offset, @@ -1527,91 +1631,101 @@ def load_river_stories__redis(request): mstories = MStory.objects(story_hash__in=story_hashes[:limit]).order_by(story_date_order) stories = Feed.format_stories(mstories) - - found_feed_ids = list(set([story['story_feed_id'] for story in stories])) + + found_feed_ids = list(set([story["story_feed_id"] for story in stories])) stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk) - + if not usersubs: - usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=found_feed_ids, - read_filter=read_filter) - + usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=found_feed_ids, read_filter=read_filter) + trained_feed_ids = [sub.feed_id for sub in usersubs if sub.is_trained] found_trained_feed_ids = list(set(trained_feed_ids) & set(found_feed_ids)) # Find starred stories if found_feed_ids: - if read_filter == 'starred': + if read_filter == "starred": starred_stories = mstories else: - story_hashes = [s['story_hash'] for s in stories] - starred_stories = MStarredStory.objects( - user_id=user.pk, - story_hash__in=story_hashes) - starred_stories = dict([(story.story_hash, dict(starred_date=story.starred_date, - user_tags=story.user_tags, - highlights=story.highlights, - user_notes=story.user_notes)) - for story in starred_stories]) + story_hashes = [s["story_hash"] for s in stories] + starred_stories = MStarredStory.objects(user_id=user.pk, story_hash__in=story_hashes) + starred_stories = dict( + [ + ( + story.story_hash, + dict( + starred_date=story.starred_date, + user_tags=story.user_tags, + highlights=story.highlights, + user_notes=story.user_notes, + ), + ) + for story in starred_stories + ] + ) else: starred_stories = {} - + # Intelligence classifiers for all feeds involved if found_trained_feed_ids: - classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, - feed_id__in=found_trained_feed_ids, - social_user_id=0)) - classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk, - feed_id__in=found_trained_feed_ids)) - classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, - feed_id__in=found_trained_feed_ids)) - classifier_tags = list(MClassifierTag.objects(user_id=user.pk, - feed_id__in=found_trained_feed_ids)) + classifier_feeds = list( + MClassifierFeed.objects(user_id=user.pk, feed_id__in=found_trained_feed_ids, social_user_id=0) + ) + classifier_authors = list( + MClassifierAuthor.objects(user_id=user.pk, feed_id__in=found_trained_feed_ids) + ) + classifier_titles = list( + 
MClassifierTitle.objects(user_id=user.pk, feed_id__in=found_trained_feed_ids) + ) + classifier_tags = list(MClassifierTag.objects(user_id=user.pk, feed_id__in=found_trained_feed_ids)) else: classifier_feeds = [] classifier_authors = [] classifier_titles = [] classifier_tags = [] - classifiers = sort_classifiers_by_feed(user=user, feed_ids=found_feed_ids, - classifier_feeds=classifier_feeds, - classifier_authors=classifier_authors, - classifier_titles=classifier_titles, - classifier_tags=classifier_tags) - + classifiers = sort_classifiers_by_feed( + user=user, + feed_ids=found_feed_ids, + classifier_feeds=classifier_feeds, + classifier_authors=classifier_authors, + classifier_titles=classifier_titles, + classifier_tags=classifier_tags, + ) + # Just need to format stories nowtz = localtime_for_timezone(now, user.profile.timezone) for story in stories: - if read_filter == 'starred': - story['read_status'] = 1 + if read_filter == "starred": + story["read_status"] = 1 else: - story['read_status'] = 0 - if read_filter == 'all' or query: - if (unread_feed_story_hashes is not None and - story['story_hash'] not in unread_feed_story_hashes): - story['read_status'] = 1 - story_date = localtime_for_timezone(story['story_date'], user.profile.timezone) - story['short_parsed_date'] = format_story_link_date__short(story_date, nowtz) - story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz) - if story['story_hash'] in starred_stories: - story['starred'] = True - starred_date = localtime_for_timezone(starred_stories[story['story_hash']]['starred_date'], - user.profile.timezone) - story['starred_date'] = format_story_link_date__long(starred_date, now) - story['starred_timestamp'] = int(starred_date.timestamp()) - story['user_tags'] = starred_stories[story['story_hash']]['user_tags'] - story['user_notes'] = starred_stories[story['story_hash']]['user_notes'] - story['highlights'] = starred_stories[story['story_hash']]['highlights'] - story['intelligence'] = { - 'feed': apply_classifier_feeds(classifier_feeds, story['story_feed_id']), - 'author': apply_classifier_authors(classifier_authors, story), - 'tags': apply_classifier_tags(classifier_tags, story), - 'title': apply_classifier_titles(classifier_titles, story), + story["read_status"] = 0 + if read_filter == "all" or query: + if unread_feed_story_hashes is not None and story["story_hash"] not in unread_feed_story_hashes: + story["read_status"] = 1 + story_date = localtime_for_timezone(story["story_date"], user.profile.timezone) + story["short_parsed_date"] = format_story_link_date__short(story_date, nowtz) + story["long_parsed_date"] = format_story_link_date__long(story_date, nowtz) + if story["story_hash"] in starred_stories: + story["starred"] = True + starred_date = localtime_for_timezone( + starred_stories[story["story_hash"]]["starred_date"], user.profile.timezone + ) + story["starred_date"] = format_story_link_date__long(starred_date, now) + story["starred_timestamp"] = int(starred_date.timestamp()) + story["user_tags"] = starred_stories[story["story_hash"]]["user_tags"] + story["user_notes"] = starred_stories[story["story_hash"]]["user_notes"] + story["highlights"] = starred_stories[story["story_hash"]]["highlights"] + story["intelligence"] = { + "feed": apply_classifier_feeds(classifier_feeds, story["story_feed_id"]), + "author": apply_classifier_authors(classifier_authors, story), + "tags": apply_classifier_tags(classifier_tags, story), + "title": apply_classifier_titles(classifier_titles, story), } - story['score'] = 
UserSubscription.score_story(story['intelligence']) - + story["score"] = UserSubscription.score_story(story["intelligence"]) + if include_feeds: - feeds = Feed.objects.filter(pk__in=set([story['story_feed_id'] for story in stories])) + feeds = Feed.objects.filter(pk__in=set([story["story_feed_id"] for story in stories])) feeds = [feed.canonical(include_favicon=False) for feed in feeds] - + if not user.profile.is_premium and not include_feeds: message = "The full River of News is a premium feature." code = 0 @@ -1623,57 +1737,79 @@ def load_river_stories__redis(request): hidden_stories_removed = 0 new_stories = [] for story in stories: - if story['score'] >= 0: + if story["score"] >= 0: new_stories.append(story) else: hidden_stories_removed += 1 stories = new_stories - + # if page > 1: # import random # time.sleep(random.randint(10, 16)) - + diff = time.time() - start timediff = round(float(diff), 2) if requested_hashes and story_hashes: - logging.user(request, "~FB%sLoading ~FC%s~FB stories: %s%s" % - ("~FBAuto-" if on_dashboard else "", - requested_hashes, story_hashes[:3], f"...(+{len(story_hashes)-3})" if len(story_hashes) > 3 else "")) + logging.user( + request, + "~FB%sLoading ~FC%s~FB stories: %s%s" + % ( + "~FBAuto-" if on_dashboard else "", + requested_hashes, + story_hashes[:3], + f"...(+{len(story_hashes)-3})" if len(story_hashes) > 3 else "", + ), + ) else: - logging.user(request, "~FY%sLoading ~FC%sriver stories~FY: ~SBp%s~SN (%s/%s " - "stories, ~SN%s/%s/%s feeds, %s/%s)" % - ("~FCAuto-" if on_dashboard else "", - "~FB~SBinfrequent~SN~FC " if infrequent else "", - page, len(stories), len(mstories), len(found_feed_ids), - len(feed_ids), len(original_feed_ids), order, read_filter)) - - if not on_dashboard and not (requested_hashes and story_hashes): - MAnalyticsLoader.add(page_load=diff) # Only count full pages, not individual stories - if hasattr(request, 'start_time'): - seconds = time.time() - request.start_time - RStats.add('page_load', duration=seconds) + logging.user( + request, + "~FY%sLoading ~FC%sriver stories~FY: ~SBp%s~SN (%s/%s " + "stories, ~SN%s/%s/%s feeds, %s/%s)" + % ( + "~FCAuto-" if on_dashboard else "", + "~FB~SBinfrequent~SN~FC " if infrequent else "", + page, + len(stories), + len(mstories), + len(found_feed_ids), + len(feed_ids), + len(original_feed_ids), + order, + read_filter, + ), + ) - data = dict(code=code, - message=message, - stories=stories, - classifiers=classifiers, - elapsed_time=timediff, - user_search=user_search, - user_profiles=user_profiles) - - if include_feeds: data['feeds'] = feeds - if not include_hidden: data['hidden_stories_removed'] = hidden_stories_removed + if not on_dashboard and not (requested_hashes and story_hashes): + MAnalyticsLoader.add(page_load=diff) # Only count full pages, not individual stories + if hasattr(request, "start_time"): + seconds = time.time() - request.start_time + RStats.add("page_load", duration=seconds) + + data = dict( + code=code, + message=message, + stories=stories, + classifiers=classifiers, + elapsed_time=timediff, + user_search=user_search, + user_profiles=user_profiles, + ) + if include_feeds: + data["feeds"] = feeds + if not include_hidden: + data["hidden_stories_removed"] = hidden_stories_removed return data + @json.json_view def load_river_stories_widget(request): logging.user(request, "Widget load") river_stories_data = json.decode(load_river_stories__redis(request).content) timeout = 3 start = time.time() - + def load_url(url): original_url = url url = 
urllib.parse.urljoin(settings.NEWSBLUR_URL, url) @@ -1686,105 +1822,122 @@ def load_url(url): pass if not conn: # logging.user(request.user, '"%s" wasn\'t fetched, trying again: %s' % (url, e)) - url = url.replace('localhost', 'haproxy') + url = url.replace("localhost", "haproxy") try: conn = urllib.request.urlopen(url, context=scontext, timeout=timeout) except (urllib.error.HTTPError, urllib.error.URLError, socket.timeout) as e: - logging.user(request.user, '~FB"%s" ~FRnot fetched~FB in %ss: ~SB%s' % (url, (time.time() - start), e)) + logging.user( + request.user, '~FB"%s" ~FRnot fetched~FB in %ss: ~SB%s' % (url, (time.time() - start), e) + ) return None data = conn.read() if not url.startswith("data:"): - data = base64.b64encode(data).decode('utf-8') + data = base64.b64encode(data).decode("utf-8") logging.user(request.user, '~FB"%s" ~SBfetched~SN in ~SB%ss' % (url, (time.time() - start))) return dict(url=original_url, data=data) - + # Find the image thumbnails and download in parallel thumbnail_urls = [] - for story in river_stories_data['stories']: - thumbnail_values = list(story['secure_image_thumbnails'].values()) + for story in river_stories_data["stories"]: + thumbnail_values = list(story["secure_image_thumbnails"].values()) for thumbnail_value in thumbnail_values: - if 'data:' in thumbnail_value: + if "data:" in thumbnail_value: continue thumbnail_urls.append(thumbnail_value) break with concurrent.futures.ThreadPoolExecutor(max_workers=6) as executor: pages = executor.map(load_url, thumbnail_urls) - + # Reassemble thumbnails back into stories thumbnail_data = dict() for page in pages: - if not page: continue - thumbnail_data[page['url']] = page['data'] - for story in river_stories_data['stories']: - thumbnail_values = list(story['secure_image_thumbnails'].values()) + if not page: + continue + thumbnail_data[page["url"]] = page["data"] + for story in river_stories_data["stories"]: + thumbnail_values = list(story["secure_image_thumbnails"].values()) if thumbnail_values and thumbnail_values[0] in thumbnail_data: page_url = thumbnail_values[0] - story['select_thumbnail_data'] = thumbnail_data[page_url] - + story["select_thumbnail_data"] = thumbnail_data[page_url] + logging.user(request, ("Elapsed Time: %ss" % (time.time() - start))) - + return river_stories_data - + + @json.json_view def complete_river(request): - user = get_user(request) - feed_ids = request.POST.getlist('feeds') or request.POST.getlist('feeds[]') - feed_ids = [int(feed_id) for feed_id in feed_ids if feed_id and feed_id.isnumeric()] - page = int(request.POST.get('page', 1)) - read_filter = request.POST.get('read_filter', 'unread') + user = get_user(request) + feed_ids = request.POST.getlist("feeds") or request.POST.getlist("feeds[]") + feed_ids = [int(feed_id) for feed_id in feed_ids if feed_id and feed_id.isnumeric()] + page = int(request.POST.get("page", 1)) + read_filter = request.POST.get("read_filter", "unread") stories_truncated = 0 - - usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids, - read_filter=read_filter) + + usersubs = UserSubscription.subs_for_feeds(user.pk, feed_ids=feed_ids, read_filter=read_filter) feed_ids = [sub.feed_id for sub in usersubs] if feed_ids: - stories_truncated = UserSubscription.truncate_river(user.pk, feed_ids, read_filter, cache_prefix="dashboard:") - + stories_truncated = UserSubscription.truncate_river( + user.pk, feed_ids, read_filter, cache_prefix="dashboard:" + ) + if page >= 1: - logging.user(request, "~FC~BBRiver complete on page ~SB%s~SN, truncating 
~SB%s~SN stories from ~SB%s~SN feeds" % (page, stories_truncated, len(feed_ids))) - + logging.user( + request, + "~FC~BBRiver complete on page ~SB%s~SN, truncating ~SB%s~SN stories from ~SB%s~SN feeds" + % (page, stories_truncated, len(feed_ids)), + ) + return dict(code=1, message="Truncated %s stories from %s" % (stories_truncated, len(feed_ids))) + @json.json_view def unread_story_hashes(request): - user = get_user(request) - feed_ids = request.GET.getlist('feed_id') or request.GET.getlist('feed_id[]') - feed_ids = [int(feed_id) for feed_id in feed_ids if feed_id] - include_timestamps = is_true(request.GET.get('include_timestamps', False)) - order = request.GET.get('order', 'newest') - read_filter = request.GET.get('read_filter', 'unread') - - story_hashes = UserSubscription.story_hashes(user.pk, feed_ids=feed_ids, - order=order, read_filter=read_filter, - include_timestamps=include_timestamps, - group_by_feed=True, - cutoff_date=user.profile.unread_cutoff) - - logging.user(request, "~FYLoading ~FCunread story hashes~FY: ~SB%s feeds~SN (%s story hashes)" % - (len(feed_ids), len(story_hashes))) + user = get_user(request) + feed_ids = request.GET.getlist("feed_id") or request.GET.getlist("feed_id[]") + feed_ids = [int(feed_id) for feed_id in feed_ids if feed_id] + include_timestamps = is_true(request.GET.get("include_timestamps", False)) + order = request.GET.get("order", "newest") + read_filter = request.GET.get("read_filter", "unread") + + story_hashes = UserSubscription.story_hashes( + user.pk, + feed_ids=feed_ids, + order=order, + read_filter=read_filter, + include_timestamps=include_timestamps, + group_by_feed=True, + cutoff_date=user.profile.unread_cutoff, + ) + + logging.user( + request, + "~FYLoading ~FCunread story hashes~FY: ~SB%s feeds~SN (%s story hashes)" + % (len(feed_ids), len(story_hashes)), + ) return dict(unread_feed_story_hashes=story_hashes) + @ajax_login_required @json.json_view def mark_all_as_read(request): code = 1 try: - days = int(request.POST.get('days', 0)) + days = int(request.POST.get("days", 0)) except ValueError: - return dict(code=-1, message="Days parameter must be an integer, not: %s" % - request.POST.get('days')) + return dict(code=-1, message="Days parameter must be an integer, not: %s" % request.POST.get("days")) read_date = datetime.datetime.utcnow() - datetime.timedelta(days=days) - + feeds = UserSubscription.objects.filter(user=request.user) - infrequent = is_true(request.POST.get('infrequent', False)) + infrequent = is_true(request.POST.get("infrequent", False)) if infrequent: - infrequent = request.POST.get('infrequent') + infrequent = request.POST.get("infrequent") feed_ids = Feed.low_volume_feeds([usersub.feed.pk for usersub in feeds], stories_per_month=infrequent) feeds = UserSubscription.objects.filter(user=request.user, feed_id__in=feed_ids) - + socialsubs = MSocialSubscription.objects.filter(user_id=request.user.pk) for subtype in [feeds, socialsubs]: for sub in subtype: @@ -1795,39 +1948,45 @@ def mark_all_as_read(request): sub.needs_unread_recalc = True sub.mark_read_date = read_date sub.save() - + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'reload:feeds') - - logging.user(request, "~FMMarking %s as read: ~SB%s days" % (("all" if not infrequent else "infrequent stories"), days,)) + r.publish(request.user.username, "reload:feeds") + + logging.user( + request, + "~FMMarking %s as read: ~SB%s days" + % ( + ("all" if not infrequent else "infrequent stories"), + days, + ), + ) return 
dict(code=code) - + + @ajax_login_required @json.json_view def mark_story_as_read(request): - story_ids = request.POST.getlist('story_id') or request.POST.getlist('story_id[]') + story_ids = request.POST.getlist("story_id") or request.POST.getlist("story_id[]") try: - feed_id = int(get_argument_or_404(request, 'feed_id')) + feed_id = int(get_argument_or_404(request, "feed_id")) except ValueError: - return dict(code=-1, errors=["You must pass a valid feed_id: %s" % - request.POST.get('feed_id')]) - + return dict(code=-1, errors=["You must pass a valid feed_id: %s" % request.POST.get("feed_id")]) + try: - usersub = UserSubscription.objects.select_related('feed').get(user=request.user, feed=feed_id) + usersub = UserSubscription.objects.select_related("feed").get(user=request.user, feed=feed_id) except Feed.DoesNotExist: duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id) if duplicate_feed: feed_id = duplicate_feed[0].feed_id try: - usersub = UserSubscription.objects.get(user=request.user, - feed=duplicate_feed[0].feed) - except (Feed.DoesNotExist): + usersub = UserSubscription.objects.get(user=request.user, feed=duplicate_feed[0].feed) + except Feed.DoesNotExist: return dict(code=-1, errors=["No feed exists for feed_id %d." % feed_id]) else: return dict(code=-1, errors=["No feed exists for feed_id %d." % feed_id]) except UserSubscription.DoesNotExist: usersub = None - + if usersub: data = usersub.mark_story_ids_as_read(story_ids, request=request) else: @@ -1835,30 +1994,33 @@ def mark_story_as_read(request): return data + @ajax_login_required @json.json_view def mark_story_hashes_as_read(request): - retrying_failed = is_true(request.POST.get('retrying_failed', False)) + retrying_failed = is_true(request.POST.get("retrying_failed", False)) r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) try: - story_hashes = request.POST.getlist('story_hash') or request.POST.getlist('story_hash[]') + story_hashes = request.POST.getlist("story_hash") or request.POST.getlist("story_hash[]") except UnreadablePostError: return dict(code=-1, message="Missing `story_hash` list parameter.") - - feed_ids, friend_ids = RUserStory.mark_story_hashes_read(request.user.pk, story_hashes, username=request.user.username) + + feed_ids, friend_ids = RUserStory.mark_story_hashes_read( + request.user.pk, story_hashes, username=request.user.username + ) if request.user.profile.is_archive: RUserUnreadStory.mark_read(request.user.pk, story_hashes) - + if friend_ids: socialsubs = MSocialSubscription.objects.filter( - user_id=request.user.pk, - subscription_user_id__in=friend_ids) + user_id=request.user.pk, subscription_user_id__in=friend_ids + ) for socialsub in socialsubs: if not socialsub.needs_unread_recalc: socialsub.needs_unread_recalc = True socialsub.save() - r.publish(request.user.username, 'social:%s' % socialsub.subscription_user_id) + r.publish(request.user.username, "social:%s" % socialsub.subscription_user_id) # Also count on original subscription for feed_id in feed_ids: @@ -1868,55 +2030,59 @@ def mark_story_hashes_as_read(request): usersub.last_read_date = datetime.datetime.now() if not usersub.needs_unread_recalc: usersub.needs_unread_recalc = True - usersub.save(update_fields=['needs_unread_recalc', 'last_read_date']) + usersub.save(update_fields=["needs_unread_recalc", "last_read_date"]) else: - usersub.save(update_fields=['last_read_date']) - r.publish(request.user.username, 'feed:%s' % feed_id) - + usersub.save(update_fields=["last_read_date"]) + r.publish(request.user.username, 
"feed:%s" % feed_id) + hash_count = len(story_hashes) - logging.user(request, "~FYRead %s %s: %s %s" % ( - hash_count, 'story' if hash_count == 1 else 'stories', - story_hashes, - '(retrying failed)' if retrying_failed else '')) + logging.user( + request, + "~FYRead %s %s: %s %s" + % ( + hash_count, + "story" if hash_count == 1 else "stories", + story_hashes, + "(retrying failed)" if retrying_failed else "", + ), + ) + + return dict(code=1, story_hashes=story_hashes, feed_ids=feed_ids, friend_user_ids=friend_ids) - return dict(code=1, story_hashes=story_hashes, - feed_ids=feed_ids, friend_user_ids=friend_ids) @ajax_login_required @json.json_view def mark_feed_stories_as_read(request): r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - feeds_stories = request.POST.get('feeds_stories', "{}") + feeds_stories = request.POST.get("feeds_stories", "{}") feeds_stories = json.decode(feeds_stories) - data = { - 'code': -1, - 'message': 'Nothing was marked as read' - } - + data = {"code": -1, "message": "Nothing was marked as read"} + for feed_id, story_ids in list(feeds_stories.items()): try: feed_id = int(feed_id) except ValueError: continue try: - usersub = UserSubscription.objects.select_related('feed').get(user=request.user, feed=feed_id) + usersub = UserSubscription.objects.select_related("feed").get(user=request.user, feed=feed_id) data = usersub.mark_story_ids_as_read(story_ids, request=request) except UserSubscription.DoesNotExist: return dict(code=-1, error="You are not subscribed to this feed_id: %d" % feed_id) except Feed.DoesNotExist: duplicate_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id) try: - if not duplicate_feed: raise Feed.DoesNotExist - usersub = UserSubscription.objects.get(user=request.user, - feed=duplicate_feed[0].feed) + if not duplicate_feed: + raise Feed.DoesNotExist + usersub = UserSubscription.objects.get(user=request.user, feed=duplicate_feed[0].feed) data = usersub.mark_story_ids_as_read(story_ids, request=request) except (UserSubscription.DoesNotExist, Feed.DoesNotExist): return dict(code=-1, error="No feed exists for feed_id: %d" % feed_id) - r.publish(request.user.username, 'feed:%s' % feed_id) - + r.publish(request.user.username, "feed:%s" % feed_id) + return data - + + @ajax_login_required @json.json_view def mark_social_stories_as_read(request): @@ -1924,103 +2090,113 @@ def mark_social_stories_as_read(request): errors = [] data = {} r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - users_feeds_stories = request.POST.get('users_feeds_stories', "{}") + users_feeds_stories = request.POST.get("users_feeds_stories", "{}") users_feeds_stories = json.decode(users_feeds_stories) for social_user_id, feeds in list(users_feeds_stories.items()): for feed_id, story_ids in list(feeds.items()): feed_id = int(feed_id) try: - socialsub = MSocialSubscription.objects.get(user_id=request.user.pk, - subscription_user_id=social_user_id) + socialsub = MSocialSubscription.objects.get( + user_id=request.user.pk, subscription_user_id=social_user_id + ) data = socialsub.mark_story_ids_as_read(story_ids, feed_id, request=request) except OperationError as e: code = -1 errors.append("Already read story: %s" % e) except MSocialSubscription.DoesNotExist: - MSocialSubscription.mark_unsub_story_ids_as_read(request.user.pk, social_user_id, - story_ids, feed_id, - request=request) + MSocialSubscription.mark_unsub_story_ids_as_read( + request.user.pk, social_user_id, story_ids, feed_id, request=request + ) except Feed.DoesNotExist: duplicate_feed = 
DuplicateFeed.objects.filter(duplicate_feed_id=feed_id) if duplicate_feed: try: - socialsub = MSocialSubscription.objects.get(user_id=request.user.pk, - subscription_user_id=social_user_id) - data = socialsub.mark_story_ids_as_read(story_ids, duplicate_feed[0].feed.pk, request=request) + socialsub = MSocialSubscription.objects.get( + user_id=request.user.pk, subscription_user_id=social_user_id + ) + data = socialsub.mark_story_ids_as_read( + story_ids, duplicate_feed[0].feed.pk, request=request + ) except (UserSubscription.DoesNotExist, Feed.DoesNotExist): code = -1 errors.append("No feed exists for feed_id %d." % feed_id) else: continue - r.publish(request.user.username, 'feed:%s' % feed_id) - r.publish(request.user.username, 'social:%s' % social_user_id) + r.publish(request.user.username, "feed:%s" % feed_id) + r.publish(request.user.username, "social:%s" % social_user_id) data.update(code=code, errors=errors) return data - -@required_params('story_id', feed_id=int) + + +@required_params("story_id", feed_id=int) @ajax_login_required @json.json_view def mark_story_as_unread(request): - story_id = request.POST.get('story_id', None) - feed_id = int(request.POST.get('feed_id', 0)) - + story_id = request.POST.get("story_id", None) + feed_id = int(request.POST.get("feed_id", 0)) + try: - usersub = UserSubscription.objects.select_related('feed').get(user=request.user, feed=feed_id) + usersub = UserSubscription.objects.select_related("feed").get(user=request.user, feed=feed_id) feed = usersub.feed except UserSubscription.DoesNotExist: usersub = None feed = Feed.get_by_id(feed_id) - + if usersub and not usersub.needs_unread_recalc: usersub.needs_unread_recalc = True - usersub.save(update_fields=['needs_unread_recalc']) - + usersub.save(update_fields=["needs_unread_recalc"]) + data = dict(code=0, payload=dict(story_id=story_id)) - + story, found_original = MStory.find_story(feed_id, story_id) - + if not story: logging.user(request, "~FY~SBUnread~SN story in feed: %s (NOT FOUND)" % (feed)) return dict(code=-1, message="Story not found.") message = RUserStory.story_can_be_marked_unread_by_user(story, request.user) if message: - data['code'] = -1 - data['message'] = message + data["code"] = -1 + data["message"] = message return data - + if usersub: data = usersub.invert_read_stories_after_unread_story(story, request) - - social_subs = MSocialSubscription.mark_dirty_sharing_story(user_id=request.user.pk, - story_feed_id=feed_id, - story_guid_hash=story.guid_hash) + + social_subs = MSocialSubscription.mark_dirty_sharing_story( + user_id=request.user.pk, story_feed_id=feed_id, story_guid_hash=story.guid_hash + ) dirty_count = social_subs and social_subs.count() dirty_count = ("(%s social_subs)" % dirty_count) if dirty_count else "" RUserStory.mark_story_hash_unread(request.user, story_hash=story.story_hash) - + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'feed:%s' % feed_id) + r.publish(request.user.username, "feed:%s" % feed_id) logging.user(request, "~FY~SBUnread~SN story in feed: %s %s" % (feed, dirty_count)) - + return data + @ajax_login_required @json.json_view -@required_params('story_hash') +@required_params("story_hash") def mark_story_hash_as_unread(request): r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - story_hashes = request.POST.getlist('story_hash') or request.POST.getlist('story_hash[]') + story_hashes = request.POST.getlist("story_hash") or request.POST.getlist("story_hash[]") is_list = len(story_hashes) > 1 datas = [] for 
story_hash in story_hashes: feed_id, _ = MStory.split_story_hash(story_hash) story, _ = MStory.find_story(feed_id, story_hash) if not story: - data = dict(code=-1, message="That story has been removed from the feed, no need to mark it unread.", story_hash=story_hash) + data = dict( + code=-1, + message="That story has been removed from the feed, no need to mark it unread.", + story_hash=story_hash, + ) if not is_list: return data else: @@ -2032,28 +2208,28 @@ def mark_story_hash_as_unread(request): return data else: datas.append(data) - + # Also count on original subscription usersubs = UserSubscription.objects.filter(user=request.user.pk, feed=feed_id) if usersubs: usersub = usersubs[0] if not usersub.needs_unread_recalc: usersub.needs_unread_recalc = True - usersub.save(update_fields=['needs_unread_recalc']) + usersub.save(update_fields=["needs_unread_recalc"]) data = usersub.invert_read_stories_after_unread_story(story, request) - r.publish(request.user.username, 'feed:%s' % feed_id) + r.publish(request.user.username, "feed:%s" % feed_id) feed_id, friend_ids = RUserStory.mark_story_hash_unread(request.user, story_hash) if friend_ids: socialsubs = MSocialSubscription.objects.filter( - user_id=request.user.pk, - subscription_user_id__in=friend_ids) + user_id=request.user.pk, subscription_user_id__in=friend_ids + ) for socialsub in socialsubs: if not socialsub.needs_unread_recalc: socialsub.needs_unread_recalc = True socialsub.save() - r.publish(request.user.username, 'social:%s' % socialsub.subscription_user_id) + r.publish(request.user.username, "social:%s" % socialsub.subscription_user_id) logging.user(request, "~FYUnread story in feed/socialsubs: %s/%s" % (feed_id, friend_ids)) @@ -2065,35 +2241,38 @@ def mark_story_hash_as_unread(request): return datas + @ajax_login_required @json.json_view def mark_feed_as_read(request): r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - feed_ids = request.POST.getlist('feed_id') or request.POST.getlist('feed_id[]') - cutoff_timestamp = int(request.POST.get('cutoff_timestamp', 0)) - direction = request.POST.get('direction', 'older') - infrequent = is_true(request.POST.get('infrequent', False)) + feed_ids = request.POST.getlist("feed_id") or request.POST.getlist("feed_id[]") + cutoff_timestamp = int(request.POST.get("cutoff_timestamp", 0)) + direction = request.POST.get("direction", "older") + infrequent = is_true(request.POST.get("infrequent", False)) if infrequent: - infrequent = request.POST.get('infrequent') + infrequent = request.POST.get("infrequent") multiple = len(feed_ids) > 1 code = 1 errors = [] cutoff_date = datetime.datetime.fromtimestamp(cutoff_timestamp) if cutoff_timestamp else None - + if infrequent: feed_ids = Feed.low_volume_feeds(feed_ids, stories_per_month=infrequent) - feed_ids = [str(f) for f in feed_ids] # This method expects strings - + feed_ids = [str(f) for f in feed_ids] # This method expects strings + if cutoff_date: - logging.user(request, "~FMMark %s feeds read, %s - cutoff: %s/%s" % - (len(feed_ids), direction, cutoff_timestamp, cutoff_date)) - + logging.user( + request, + "~FMMark %s feeds read, %s - cutoff: %s/%s" + % (len(feed_ids), direction, cutoff_timestamp, cutoff_date), + ) + for feed_id in feed_ids: - if 'social:' in feed_id: - user_id = int(feed_id.replace('social:', '')) + if "social:" in feed_id: + user_id = int(feed_id.replace("social:", "")) try: - sub = MSocialSubscription.objects.get(user_id=request.user.pk, - subscription_user_id=user_id) + sub = 
MSocialSubscription.objects.get(user_id=request.user.pk, subscription_user_id=user_id) except MSocialSubscription.DoesNotExist: logging.user(request, "~FRCouldn't find socialsub: %s" % user_id) continue @@ -2109,61 +2288,63 @@ def mark_feed_as_read(request): except (Feed.DoesNotExist, UserSubscription.DoesNotExist) as e: errors.append("User not subscribed: %s" % e) continue - except (ValueError) as e: + except ValueError as e: errors.append("Invalid feed_id: %s" % e) continue if not sub: errors.append("User not subscribed: %s" % feed_id) continue - + try: if direction == "older": marked_read = sub.mark_feed_read(cutoff_date=cutoff_date) else: marked_read = sub.mark_newer_stories_read(cutoff_date=cutoff_date) if marked_read and not multiple: - r.publish(request.user.username, 'feed:%s' % feed_id) + r.publish(request.user.username, "feed:%s" % feed_id) except IntegrityError as e: errors.append("Could not mark feed as read: %s" % e) code = -1 - + if multiple: logging.user(request, "~FMMarking ~SB%s~SN feeds as read" % len(feed_ids)) - r.publish(request.user.username, 'refresh:%s' % ','.join(feed_ids)) - + r.publish(request.user.username, "refresh:%s" % ",".join(feed_ids)) + if errors: logging.user(request, "~FMMarking read had errors: ~FR%s" % errors) - + return dict(code=code, errors=errors, cutoff_date=cutoff_date, direction=direction) + def _parse_user_info(user): return { - 'user_info': { - 'is_anonymous': json.encode(user.is_anonymous), - 'is_authenticated': json.encode(user.is_authenticated), - 'username': json.encode(user.username if user.is_authenticated else 'Anonymous') + "user_info": { + "is_anonymous": json.encode(user.is_anonymous), + "is_authenticated": json.encode(user.is_authenticated), + "username": json.encode(user.username if user.is_authenticated else "Anonymous"), } } + @ajax_login_required @json.json_view def add_url(request): code = 0 - url = request.POST['url'] - folder = request.POST.get('folder', '').replace('river:', '') - new_folder = request.POST.get('new_folder', '').replace('river:', '') - auto_active = is_true(request.POST.get('auto_active', 1)) - skip_fetch = is_true(request.POST.get('skip_fetch', False)) + url = request.POST["url"] + folder = request.POST.get("folder", "").replace("river:", "") + new_folder = request.POST.get("new_folder", "").replace("river:", "") + auto_active = is_true(request.POST.get("auto_active", 1)) + skip_fetch = is_true(request.POST.get("skip_fetch", False)) feed = None - + if not url: code = -1 - message = 'Enter in the website address or the feed URL.' + message = "Enter in the website address or the feed URL." elif any([(banned_url in url) for banned_url in BANNED_URLS]): code = -1 message = "The publisher of this website has banned NewsBlur." - elif re.match('(https?://)?twitter.com/\w+/?$', url): + elif re.match("(https?://)?twitter.com/\w+/?$", url): if not request.user.profile.is_premium: message = "You must be a premium subscriber to add Twitter feeds." code = -1 @@ -2177,7 +2358,7 @@ def add_url(request): except tweepy.TweepError: code = -1 message = "Your Twitter connection isn't setup. Go to Manage - Friends/Followers and reconnect Twitter." 
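# --- Aside: a condensed, self-contained sketch of the URL gate that add_url
# applies above. BANNED_URLS is assumed to be an iterable of substrings; the
# Twitter pattern is written as a raw string here (the inline pattern above
# uses "\w" in a plain string, which newer Pythons flag as an invalid escape).
import re

TWITTER_PROFILE_RE = re.compile(r"(https?://)?twitter\.com/\w+/?$")

def validate_add_url(url, is_premium, banned_urls=()):
    # Returns (code, message) with the same outcomes as the view: reject
    # empty URLs, banned publishers, and Twitter profiles for free accounts.
    if not url:
        return -1, "Enter in the website address or the feed URL."
    if any(banned in url for banned in banned_urls):
        return -1, "The publisher of this website has banned NewsBlur."
    if TWITTER_PROFILE_RE.match(url) and not is_premium:
        return -1, "You must be a premium subscriber to add Twitter feeds."
    return 1, ""
# --- End aside.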
- + if code == -1: return dict(code=code, message=message) @@ -2186,25 +2367,26 @@ def add_url(request): usf.add_folder(folder, new_folder) folder = new_folder - code, message, us = UserSubscription.add_subscription(user=request.user, feed_address=url, - folder=folder, auto_active=auto_active, - skip_fetch=skip_fetch) + code, message, us = UserSubscription.add_subscription( + user=request.user, feed_address=url, folder=folder, auto_active=auto_active, skip_fetch=skip_fetch + ) feed = us and us.feed if feed: r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'reload:%s' % feed.pk) + r.publish(request.user.username, "reload:%s" % feed.pk) MUserSearch.schedule_index_feeds_for_search(feed.pk, request.user.pk) - + return dict(code=code, message=message, feed=feed) + @ajax_login_required @json.json_view def add_folder(request): - folder = request.POST['folder'].replace('river:', '') - parent_folder = request.POST.get('parent_folder', '').replace('river:', '') + folder = request.POST["folder"].replace("river:", "") + parent_folder = request.POST.get("parent_folder", "").replace("river:", "") folders = None logging.user(request, "~FRAdding Folder: ~SB%s (in %s)" % (folder, parent_folder)) - + if folder: code = 1 message = "" @@ -2212,43 +2394,45 @@ def add_folder(request): user_sub_folders_object.add_folder(parent_folder, folder) folders = json.decode(user_sub_folders_object.folders) r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'reload:feeds') + r.publish(request.user.username, "reload:feeds") else: code = -1 message = "Gotta write in a folder name." - + return dict(code=code, message=message, folders=folders) + @ajax_login_required @json.json_view def delete_feed(request): - feed_id = int(request.POST['feed_id']) - in_folder = request.POST.get('in_folder', '').replace('river:', '') - if not in_folder or in_folder == ' ': + feed_id = int(request.POST["feed_id"]) + in_folder = request.POST.get("in_folder", "").replace("river:", "") + if not in_folder or in_folder == " ": in_folder = "" - + user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user) user_sub_folders.delete_feed(feed_id, in_folder) - + feed = Feed.objects.filter(pk=feed_id) if feed: feed[0].count_subscribers() - + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'reload:feeds') - + r.publish(request.user.username, "reload:feeds") + return dict(code=1, message="Removed %s from '%s'." % (feed, in_folder)) + @ajax_login_required @json.json_view def delete_feed_by_url(request): message = "" code = 0 - url = request.POST['url'] - in_folder = request.POST.get('in_folder', '').replace('river:', '') - if in_folder == ' ': + url = request.POST["url"] + in_folder = request.POST.get("in_folder", "").replace("river:", "") + if in_folder == " ": in_folder = "" - + logging.user(request.user, "~FBFinding feed (delete_feed_by_url): %s" % url) feed = Feed.get_feed_from_url(url, create=False) if feed: @@ -2261,19 +2445,22 @@ def delete_feed_by_url(request): else: code = -1 message = "URL not found." 
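# --- Aside: every mutating view above ends with the same real-time fan-out:
# publish on a Redis channel named after the user so connected clients know
# to refetch. A minimal sketch; the localhost pool here stands in for
# settings.REDIS_PUBSUB_POOL, which is an assumption of this example only.
import redis

def notify_feed_change(username, feed_id=None):
    # "reload:feeds" redraws the whole feed list; "reload:<id>" targets one feed.
    pool = redis.ConnectionPool(host="localhost", port=6379, db=0)
    r = redis.Redis(connection_pool=pool)
    r.publish(username, "reload:%s" % feed_id if feed_id else "reload:feeds")
# --- End aside.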
- + return dict(code=code, message=message) - + + @ajax_login_required @json.json_view def delete_folder(request): - folder_to_delete = request.POST.get('folder_name') or request.POST.get('folder_to_delete') - in_folder = request.POST.get('in_folder', None) - feed_ids_in_folder = request.POST.getlist('feed_id') or request.POST.getlist('feed_id[]') + folder_to_delete = request.POST.get("folder_name") or request.POST.get("folder_to_delete") + in_folder = request.POST.get("in_folder", None) + feed_ids_in_folder = request.POST.getlist("feed_id") or request.POST.getlist("feed_id[]") feed_ids_in_folder = [int(f) for f in feed_ids_in_folder if f] - request.user.profile.send_opml_export_email(reason="You have deleted an entire folder of feeds, so here's a backup of all of your subscriptions just in case.") - + request.user.profile.send_opml_export_email( + reason="You have deleted an entire folder of feeds, so here's a backup of all of your subscriptions just in case." + ) + # Works piss poor with duplicate folder titles, if they are both in the same folder. # Deletes all, but only in the same folder parent. But nobody should be doing that, right? user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user) @@ -2281,19 +2468,21 @@ def delete_folder(request): folders = json.decode(user_sub_folders.folders) r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'reload:feeds') - + r.publish(request.user.username, "reload:feeds") + return dict(code=1, folders=folders) -@required_params('feeds_by_folder') +@required_params("feeds_by_folder") @ajax_login_required @json.json_view def delete_feeds_by_folder(request): - feeds_by_folder = json.decode(request.POST['feeds_by_folder']) + feeds_by_folder = json.decode(request.POST["feeds_by_folder"]) + + request.user.profile.send_opml_export_email( + reason="You have deleted a number of feeds at once, so here's a backup of all of your subscriptions just in case." + ) - request.user.profile.send_opml_export_email(reason="You have deleted a number of feeds at once, so here's a backup of all of your subscriptions just in case.") - # Works piss poor with duplicate folder titles, if they are both in the same folder. # Deletes all, but only in the same folder parent. But nobody should be doing that, right? 
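# --- Aside: the folders attribute decodes to a nested list mixing feed ids
# (ints) with single-key {"Folder Title": [...]} dicts, which is why duplicate
# titles are ambiguous. An example structure (the values are invented):
#
#     [4, 16, {"Tech": [23, 42, {"Linux": [8]}]}, {"News": [15]}]
#
# MFeedFolder.feed_folder_parent, further down in this patch, walks it
# recursively; a standalone version of the same walk:
def find_parent_folder(folders, feed_id, title=""):
    for item in folders:
        if isinstance(item, int) and item == feed_id:
            return title  # "" means the feed sits at the top level
        if isinstance(item, dict):
            for name, children in item.items():
                found = find_parent_folder(children, feed_id, name)
                if found:
                    return found
# --- End aside.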
user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user) @@ -2301,38 +2490,40 @@ def delete_feeds_by_folder(request): folders = json.decode(user_sub_folders.folders) r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'reload:feeds') - + r.publish(request.user.username, "reload:feeds") + return dict(code=1, folders=folders) + @ajax_login_required @json.json_view def rename_feed(request): - feed = get_object_or_404(Feed, pk=int(request.POST['feed_id'])) + feed = get_object_or_404(Feed, pk=int(request.POST["feed_id"])) try: user_sub = UserSubscription.objects.get(user=request.user, feed=feed) except UserSubscription.DoesNotExist: return dict(code=-1, message=f"You are not subscribed to {feed.feed_title}") - - feed_title = request.POST['feed_title'] - - logging.user(request, "~FRRenaming feed '~SB%s~SN' to: ~SB%s" % ( - feed.feed_title, feed_title)) - + + feed_title = request.POST["feed_title"] + + logging.user(request, "~FRRenaming feed '~SB%s~SN' to: ~SB%s" % (feed.feed_title, feed_title)) + user_sub.user_title = feed_title user_sub.save() - + return dict(code=1) - + + @ajax_login_required @json.json_view def rename_folder(request): - folder_to_rename = request.POST.get('folder_name') or request.POST.get('folder_to_rename') - new_folder_name = request.POST['new_folder_name'] - in_folder = request.POST.get('in_folder', '').replace('river:', '') - if 'Top Level' in in_folder: in_folder = '' + folder_to_rename = request.POST.get("folder_name") or request.POST.get("folder_to_rename") + new_folder_name = request.POST["new_folder_name"] + in_folder = request.POST.get("in_folder", "").replace("river:", "") + if "Top Level" in in_folder: + in_folder = "" code = 0 - + # Works piss poor with duplicate folder titles, if they are both in the same folder. # renames all, but only in the same folder parent. But nobody should be doing that, right? 
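# --- Aside: the "name" / "name[]" double lookup recurs in nearly every view
# above because jQuery appends brackets to array parameters. A hypothetical
# helper (not in the codebase) that captures the idiom:
def getlist_compat(querydict, name):
    # Try the bare key first, then the jQuery-style bracketed key.
    return querydict.getlist(name) or querydict.getlist(name + "[]")
# --- End aside.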
if folder_to_rename and new_folder_name: @@ -2341,66 +2532,74 @@ def rename_folder(request): code = 1 else: code = -1 - + return dict(code=code) - + + @ajax_login_required @json.json_view def move_feed_to_folders(request): - feed_id = int(request.POST['feed_id']) - in_folders = request.POST.getlist('in_folders', '') or request.POST.getlist('in_folders[]', '') - to_folders = request.POST.getlist('to_folders', '') or request.POST.getlist('to_folders[]', '') + feed_id = int(request.POST["feed_id"]) + in_folders = request.POST.getlist("in_folders", "") or request.POST.getlist("in_folders[]", "") + to_folders = request.POST.getlist("to_folders", "") or request.POST.getlist("to_folders[]", "") user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user) - user_sub_folders = user_sub_folders.move_feed_to_folders(feed_id, in_folders=in_folders, - to_folders=to_folders) - + user_sub_folders = user_sub_folders.move_feed_to_folders( + feed_id, in_folders=in_folders, to_folders=to_folders + ) + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'reload:feeds') + r.publish(request.user.username, "reload:feeds") return dict(code=1, folders=json.decode(user_sub_folders.folders)) - + + @ajax_login_required @json.json_view def move_feed_to_folder(request): - feed_id = int(request.POST['feed_id']) - in_folder = request.POST.get('in_folder', '') - to_folder = request.POST.get('to_folder', '') + feed_id = int(request.POST["feed_id"]) + in_folder = request.POST.get("in_folder", "") + to_folder = request.POST.get("to_folder", "") user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user) - user_sub_folders = user_sub_folders.move_feed_to_folder(feed_id, in_folder=in_folder, - to_folder=to_folder) - + user_sub_folders = user_sub_folders.move_feed_to_folder(feed_id, in_folder=in_folder, to_folder=to_folder) + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'reload:feeds') + r.publish(request.user.username, "reload:feeds") return dict(code=1, folders=json.decode(user_sub_folders.folders)) - + + @ajax_login_required @json.json_view def move_folder_to_folder(request): - folder_name = request.POST['folder_name'] - in_folder = request.POST.get('in_folder', '') - to_folder = request.POST.get('to_folder', '') - + folder_name = request.POST["folder_name"] + in_folder = request.POST.get("in_folder", "") + to_folder = request.POST.get("to_folder", "") + user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user) - user_sub_folders = user_sub_folders.move_folder_to_folder(folder_name, in_folder=in_folder, to_folder=to_folder) - + user_sub_folders = user_sub_folders.move_folder_to_folder( + folder_name, in_folder=in_folder, to_folder=to_folder + ) + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'reload:feeds') + r.publish(request.user.username, "reload:feeds") return dict(code=1, folders=json.decode(user_sub_folders.folders)) -@required_params('feeds_by_folder', 'to_folder') + +@required_params("feeds_by_folder", "to_folder") @ajax_login_required @json.json_view def move_feeds_by_folder_to_folder(request): - feeds_by_folder = json.decode(request.POST['feeds_by_folder']) - to_folder = request.POST['to_folder'] - new_folder = request.POST.get('new_folder', None) + feeds_by_folder = json.decode(request.POST["feeds_by_folder"]) + to_folder = request.POST["to_folder"] + new_folder = request.POST.get("new_folder", None) + + 
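# --- Aside: @required_params, as used on this view and throughout the file,
# takes bare names for required POST fields and keyword arguments for typed
# fields (e.g. feed_id=int). The real decorator lives elsewhere in the
# codebase; this sketch only models the contract visible from its call sites:
from functools import wraps

def required_params_sketch(*names, **typed):
    def decorator(view):
        @wraps(view)
        def wrapped(request, *args, **kwargs):
            for name in names:
                if name not in request.POST:
                    return {"code": -1, "message": "Missing parameter: %s" % name}
            for name, cast in typed.items():
                try:
                    cast(request.POST.get(name, ""))
                except (TypeError, ValueError):
                    return {"code": -1, "message": "Invalid parameter: %s" % name}
            return view(request, *args, **kwargs)
        return wrapped
    return decorator
# --- End aside.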
request.user.profile.send_opml_export_email( + reason="You have moved a number of feeds at once, so here's a backup of all of your subscriptions just in case." + ) - request.user.profile.send_opml_export_email(reason="You have moved a number of feeds at once, so here's a backup of all of your subscriptions just in case.") - user_sub_folders = get_object_or_404(UserSubscriptionFolders, user=request.user) if new_folder: @@ -2408,44 +2607,50 @@ def move_feeds_by_folder_to_folder(request): to_folder = new_folder user_sub_folders = user_sub_folders.move_feeds_by_folder_to_folder(feeds_by_folder, to_folder) - + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'reload:feeds') + r.publish(request.user.username, "reload:feeds") return dict(code=1, folders=json.decode(user_sub_folders.folders)) - + + @login_required def add_feature(request): if not request.user.is_staff: return HttpResponseForbidden() - code = -1 + code = -1 form = FeatureForm(request.POST) - + if form.is_valid(): form.save() code = 1 - return HttpResponseRedirect(reverse('index')) - + return HttpResponseRedirect(reverse("index")) + return dict(code=code) - + + @json.json_view def load_features(request): user = get_user(request) - page = max(int(request.GET.get('page', 0)), 0) + page = max(int(request.GET.get("page", 0)), 0) if page > 1: - logging.user(request, "~FBBrowse features: ~SBPage #%s" % (page+1)) - features = list(Feature.objects.all()[page*3:(page+1)*3+1].values()) - features = [{ - 'description': f['description'], - 'date': localtime_for_timezone(f['date'], user.profile.timezone).strftime("%b %d, %Y") - } for f in features] + logging.user(request, "~FBBrowse features: ~SBPage #%s" % (page + 1)) + features = list(Feature.objects.all()[page * 3 : (page + 1) * 3 + 1].values()) + features = [ + { + "description": f["description"], + "date": localtime_for_timezone(f["date"], user.profile.timezone).strftime("%b %d, %Y"), + } + for f in features + ] return features + @ajax_login_required @json.json_view def save_feed_order(request): - folders = request.POST.get('folders') + folders = request.POST.get("folders") if folders: # Test that folders can be JSON decoded folders_list = json.decode(folders) @@ -2454,44 +2659,50 @@ def save_feed_order(request): user_sub_folders = UserSubscriptionFolders.objects.get(user=request.user) user_sub_folders.folders = folders user_sub_folders.save() - + return {} + @json.json_view def feeds_trainer(request): classifiers = [] - feed_id = request.GET.get('feed_id') + feed_id = request.GET.get("feed_id") user = get_user(request) usersubs = UserSubscription.objects.filter(user=user, active=True) - + if feed_id: feed = get_object_or_404(Feed, pk=feed_id) usersubs = usersubs.filter(feed=feed) - usersubs = usersubs.select_related('feed').order_by('-feed__stories_last_month') - + usersubs = usersubs.select_related("feed").order_by("-feed__stories_last_month") + for us in usersubs: if (not us.is_trained and us.feed.stories_last_month > 0) or feed_id: classifier = dict() - classifier['classifiers'] = get_classifiers_for_user(user, feed_id=us.feed.pk) - classifier['feed_id'] = us.feed_id - classifier['stories_last_month'] = us.feed.stories_last_month - classifier['num_subscribers'] = us.feed.num_subscribers - classifier['feed_tags'] = json.decode(us.feed.data.popular_tags) if us.feed.data.popular_tags else [] - classifier['feed_authors'] = json.decode(us.feed.data.popular_authors) if us.feed.data.popular_authors else [] + classifier["classifiers"] = 
get_classifiers_for_user(user, feed_id=us.feed.pk)
+            classifier["feed_id"] = us.feed_id
+            classifier["stories_last_month"] = us.feed.stories_last_month
+            classifier["num_subscribers"] = us.feed.num_subscribers
+            classifier["feed_tags"] = (
+                json.decode(us.feed.data.popular_tags) if us.feed.data.popular_tags else []
+            )
+            classifier["feed_authors"] = (
+                json.decode(us.feed.data.popular_authors) if us.feed.data.popular_authors else []
+            )
             classifiers.append(classifier)
-    
+
     user.profile.has_trained_intelligence = True
     user.profile.save()
-    
+
     logging.user(user, "~FGLoading Trainer: ~SB%s feeds" % (len(classifiers)))
-    
+
     return classifiers
 
+
 @ajax_login_required
 @json.json_view
 def save_feed_chooser(request):
     is_premium = request.user.profile.is_premium
-    approved_feeds = request.POST.getlist('approved_feeds') or request.POST.getlist('approved_feeds[]')
+    approved_feeds = request.POST.getlist("approved_feeds") or request.POST.getlist("approved_feeds[]")
     approved_feeds = [int(feed_id) for feed_id in approved_feeds if feed_id]
     approve_all = False
     if not is_premium:
@@ -2500,7 +2711,7 @@ def save_feed_chooser(request):
             approve_all = True
     activated = 0
     usersubs = UserSubscription.objects.filter(user=request.user)
-    
+
     for sub in usersubs:
         try:
             if sub.feed_id in approved_feeds or approve_all:
@@ -2515,32 +2726,31 @@ def save_feed_chooser(request):
                 sub.save()
         except Feed.DoesNotExist:
             pass
-    
+
     UserSubscription.queue_new_feeds(request.user)
     UserSubscription.refresh_stale_feeds(request.user, exclude_new=True)
-    
+
     r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL)
-    r.publish(request.user.username, 'reload:feeds')
-    
-    logging.user(request, "~BB~FW~SBFeed chooser: ~FC%s~SN/~SB%s" % (
-        activated,
-        usersubs.count()
-    ))
-    
-    return {'activated': activated}
+    r.publish(request.user.username, "reload:feeds")
+
+    logging.user(request, "~BB~FW~SBFeed chooser: ~FC%s~SN/~SB%s" % (activated, usersubs.count()))
+
+    return {"activated": activated}
+
 
 @ajax_login_required
 def retrain_all_sites(request):
     for sub in UserSubscription.objects.filter(user=request.user):
         sub.is_trained = False
         sub.save()
-    
+
     return feeds_trainer(request)
-    
+
+
 @login_required
 def activate_premium_account(request):
     try:
-        usersubs = UserSubscription.objects.select_related('feed').filter(user=request.user)
+        usersubs = UserSubscription.objects.select_related("feed").filter(user=request.user)
         for sub in usersubs:
             sub.active = True
             sub.save()
@@ -2549,11 +2759,12 @@ def activate_premium_account(request):
             sub.feed.schedule_feed_fetch_immediately()
     except Exception as e:
-        logging.user(request, "~BR~FWPremium activation failed: {e} {usersubs}")
-    
+        logging.user(request, f"~BR~FWPremium activation failed: {e} {usersubs}")
+
     request.user.profile.is_premium = True
     request.user.profile.save()
-    
-    return HttpResponseRedirect(reverse('index'))
+
+    return HttpResponseRedirect(reverse("index"))
+
 
 @login_required
 def login_as(request):
@@ -2561,69 +2772,74 @@ def login_as(request):
         logging.user(request, "~SKNON-STAFF LOGGING IN AS ANOTHER USER!")
         assert False
         return HttpResponseForbidden()
-    username = request.GET['user']
+    username = request.GET["user"]
     user = get_object_or_404(User, username__iexact=username)
     user.backend = settings.AUTHENTICATION_BACKENDS[0]
-    login_user(request, user, backend='django.contrib.auth.backends.ModelBackend')
-    return HttpResponseRedirect(reverse('index'))
-
+    login_user(request, user, backend="django.contrib.auth.backends.ModelBackend")
+    return HttpResponseRedirect(reverse("index"))
+
+
 def iframe_buster(request):
     logging.user(request, "~FB~SBiFrame bust!")
     return 
HttpResponse(status=204) -@required_params('story_id', feed_id=int) + +@required_params("story_id", feed_id=int) @ajax_login_required @json.json_view def mark_story_as_starred(request): return _mark_story_as_starred(request) - -@required_params('story_hash') + + +@required_params("story_hash") @ajax_login_required @json.json_view def mark_story_hash_as_starred(request): return _mark_story_as_starred(request) - + + def _mark_story_as_starred(request): - code = 1 - feed_id = int(request.POST.get('feed_id', 0)) - story_id = request.POST.get('story_id', None) - user_tags = request.POST.getlist('user_tags') or request.POST.getlist('user_tags[]') - user_notes = request.POST.get('user_notes', None) - highlights = request.POST.getlist('highlights') or request.POST.getlist('highlights[]') or [] - message = "" - story_hashes = request.POST.getlist('story_hash') or request.POST.getlist('story_hash[]') + code = 1 + feed_id = int(request.POST.get("feed_id", 0)) + story_id = request.POST.get("story_id", None) + user_tags = request.POST.getlist("user_tags") or request.POST.getlist("user_tags[]") + user_notes = request.POST.get("user_notes", None) + highlights = request.POST.getlist("highlights") or request.POST.getlist("highlights[]") or [] + message = "" + story_hashes = request.POST.getlist("story_hash") or request.POST.getlist("story_hash[]") is_list = len(story_hashes) > 1 datas = [] if not len(story_hashes): - story, _ = MStory.find_story(story_feed_id=feed_id, story_id=story_id) + story, _ = MStory.find_story(story_feed_id=feed_id, story_id=story_id) if story: story_hashes = [story.story_hash] - + if not len(story_hashes): - return {'code': -1, 'message': "Could not find story to save."} - + return {"code": -1, "message": "Could not find story to save."} + for story_hash in story_hashes: - story, _ = MStory.find_story(story_hash=story_hash) + story, _ = MStory.find_story(story_hash=story_hash) if not story: logging.user(request, "~FCStarring ~FRfailed~FC: %s not found" % (story_hash)) - datas.append({'code': -1, 'message': "Could not save story, not found", 'story_hash': story_hash}) + datas.append({"code": -1, "message": "Could not save story, not found", "story_hash": story_hash}) continue feed_id = story and story.story_feed_id - - story_db = dict([(k, v) for k, v in list(story._data.items()) - if k is not None and v is not None]) + + story_db = dict([(k, v) for k, v in list(story._data.items()) if k is not None and v is not None]) # Pop all existing user-specific fields because we don't want to reuse them from the found story # in case MStory.find_story uses somebody else's saved/shared story (because the original is deleted) - story_db.pop('user_id', None) - story_db.pop('starred_date', None) - story_db.pop('id', None) - story_db.pop('user_tags', None) - story_db.pop('highlights', None) - story_db.pop('user_notes', None) - + story_db.pop("user_id", None) + story_db.pop("starred_date", None) + story_db.pop("id", None) + story_db.pop("user_tags", None) + story_db.pop("highlights", None) + story_db.pop("user_notes", None) + now = datetime.datetime.now() - story_values = dict(starred_date=now, user_tags=user_tags, highlights=highlights, user_notes=user_notes, **story_db) + story_values = dict( + starred_date=now, user_tags=user_tags, highlights=highlights, user_notes=user_notes, **story_db + ) params = dict(story_guid=story.story_guid, user_id=request.user.pk) starred_story = MStarredStory.objects(**params).limit(1) created = False @@ -2632,19 +2848,25 @@ def _mark_story_as_starred(request): 
removed_highlights = [] if not starred_story: params.update(story_values) - if 'story_latest_content_z' in params: - params.pop('story_latest_content_z') + if "story_latest_content_z" in params: + params.pop("story_latest_content_z") try: starred_story = MStarredStory.objects.create(**params) except OperationError as e: - logging.user(request, "~FCStarring ~FRfailed~FC: ~SB%s (~FM~SB%s~FC~SN)" % (story.story_title[:32], e)) - datas.append({'code': -1, 'message': "Could not save story due to: %s" % e, 'story_hash': story_hash}) - + logging.user( + request, "~FCStarring ~FRfailed~FC: ~SB%s (~FM~SB%s~FC~SN)" % (story.story_title[:32], e) + ) + datas.append( + {"code": -1, "message": "Could not save story due to: %s" % e, "story_hash": story_hash} + ) + created = True - MActivity.new_starred_story(user_id=request.user.pk, - story_title=story.story_title, - story_feed_id=feed_id, - story_id=starred_story.story_guid) + MActivity.new_starred_story( + user_id=request.user.pk, + story_title=story.story_title, + story_feed_id=feed_id, + story_id=starred_story.story_guid, + ) new_user_tags = user_tags new_highlights = highlights changed_user_notes = bool(user_notes) @@ -2660,57 +2882,74 @@ def _mark_story_as_starred(request): starred_story.highlights = highlights starred_story.user_notes = user_notes starred_story.save() - + if len(highlights) == 1 and len(new_highlights) == 1: MStarredStoryCounts.adjust_count(request.user.pk, highlights=True, amount=1) elif len(highlights) == 0 and len(removed_highlights): MStarredStoryCounts.adjust_count(request.user.pk, highlights=True, amount=-1) - + for tag in new_user_tags: MStarredStoryCounts.adjust_count(request.user.pk, tag=tag, amount=1) for tag in removed_user_tags: MStarredStoryCounts.adjust_count(request.user.pk, tag=tag, amount=-1) - + if random.random() < 0.01: MStarredStoryCounts.schedule_count_tags_for_user(request.user.pk) MStarredStoryCounts.count_for_user(request.user.pk, total_only=True) starred_counts, starred_count = MStarredStoryCounts.user_counts(request.user.pk, include_total=True) if not starred_count and len(starred_counts): - starred_count = MStarredStory.objects(user_id=request.user.pk).count() - + starred_count = MStarredStory.objects(user_id=request.user.pk).count() + if not changed_user_notes: r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'story:starred:%s' % story.story_hash) - + r.publish(request.user.username, "story:starred:%s" % story.story_hash) + if created: - logging.user(request, "~FCStarring: ~SB%s (~FM~SB%s~FC~SN)" % (story.story_title[:32], starred_story.user_tags)) + logging.user( + request, + "~FCStarring: ~SB%s (~FM~SB%s~FC~SN)" % (story.story_title[:32], starred_story.user_tags), + ) else: - logging.user(request, "~FCUpdating starred:~SN~FC ~SB%s~SN (~FM~SB%s~FC~SN/~FM%s~FC)" % (story.story_title[:32], starred_story.user_tags, starred_story.user_notes)) - - datas.append({'code': code, 'message': message, 'starred_count': starred_count, 'starred_counts': starred_counts}) - + logging.user( + request, + "~FCUpdating starred:~SN~FC ~SB%s~SN (~FM~SB%s~FC~SN/~FM%s~FC)" + % (story.story_title[:32], starred_story.user_tags, starred_story.user_notes), + ) + + datas.append( + { + "code": code, + "message": message, + "starred_count": starred_count, + "starred_counts": starred_counts, + } + ) + if len(datas) >= 2: return datas elif len(datas) == 1: return datas[0] return datas - -@required_params('story_id') + + +@required_params("story_id") @ajax_login_required @json.json_view def 
mark_story_as_unstarred(request): return _mark_story_as_unstarred(request) - -@required_params('story_hash') + + +@required_params("story_hash") @ajax_login_required @json.json_view def mark_story_hash_as_unstarred(request): return _mark_story_as_unstarred(request) + def _mark_story_as_unstarred(request): - code = 1 - story_id = request.POST.get('story_id', None) - story_hashes = request.POST.getlist('story_hash') or request.POST.getlist('story_hash[]') + code = 1 + story_id = request.POST.get("story_id", None) + story_hashes = request.POST.getlist("story_hash") or request.POST.getlist("story_hash[]") starred_counts = None starred_story = None if story_id: @@ -2720,28 +2959,32 @@ def _mark_story_as_unstarred(request): story_hashes = [starred_story.story_hash] else: story_hashes = [story_id] - + datas = [] for story_hash in story_hashes: starred_story = MStarredStory.objects(user_id=request.user.pk, story_hash=story_hash) if not starred_story: logging.user(request, "~FCUnstarring ~FRfailed~FC: %s not found" % (story_hash)) - datas.append({'code': -1, 'message': "Could not unsave story, not found", 'story_hash': story_hash}) + datas.append( + {"code": -1, "message": "Could not unsave story, not found", "story_hash": story_hash} + ) continue - + starred_story = starred_story[0] logging.user(request, "~FCUnstarring: ~SB%s" % (starred_story.story_title[:50])) user_tags = starred_story.user_tags feed_id = starred_story.story_feed_id - MActivity.remove_starred_story(user_id=request.user.pk, - story_feed_id=starred_story.story_feed_id, - story_id=starred_story.story_guid) + MActivity.remove_starred_story( + user_id=request.user.pk, + story_feed_id=starred_story.story_feed_id, + story_id=starred_story.story_guid, + ) starred_story.user_id = 0 try: starred_story.save() except NotUniqueError: starred_story.delete() - + MStarredStoryCounts.adjust_count(request.user.pk, feed_id=feed_id, amount=-1) for tag in user_tags: @@ -2752,27 +2995,32 @@ def _mark_story_as_unstarred(request): MStarredStoryCounts.schedule_count_tags_for_user(request.user.pk) MStarredStoryCounts.count_for_user(request.user.pk, total_only=True) starred_counts = MStarredStoryCounts.user_counts(request.user.pk) - + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(request.user.username, 'story:unstarred:%s' % starred_story.story_hash) - + r.publish(request.user.username, "story:unstarred:%s" % starred_story.story_hash) + if not story_hashes: datas.append(dict(code=-1, message=f"Failed to find {story_hashes}")) - - return {'code': code, 'starred_counts': starred_counts, 'messages': datas} - + + return {"code": code, "starred_counts": starred_counts, "messages": datas} + + @ajax_login_required @json.json_view def starred_counts(request): starred_counts, starred_count = MStarredStoryCounts.user_counts(request.user.pk, include_total=True) - logging.user(request, "~FCRequesting starred counts: ~SB%s stories (%s tags)" % (starred_count, len([s for s in starred_counts if s['tag']]))) + logging.user( + request, + "~FCRequesting starred counts: ~SB%s stories (%s tags)" + % (starred_count, len([s for s in starred_counts if s["tag"]])), + ) + + return {"starred_count": starred_count, "starred_counts": starred_counts} + - return {'starred_count': starred_count, 'starred_counts': starred_counts} - @ajax_login_required @json.json_view def send_story_email(request): - def validate_email_as_bool(email): try: validate_email(email) @@ -2780,46 +3028,49 @@ def validate_email_as_bool(email): except: return False - code = 1 - message 
= 'OK' - user = get_user(request) - story_id = request.POST['story_id'] - feed_id = request.POST['feed_id'] - to_addresses = request.POST.get('to', '').replace(',', ' ').replace(' ', ' ').strip().split(' ') - from_name = request.POST['from_name'] - from_email = request.POST['from_email'] - email_cc = is_true(request.POST.get('email_cc', 'true')) - comments = request.POST['comments'] - comments = comments[:2048] # Separated due to PyLint - from_address = 'share@newsblur.com' + code = 1 + message = "OK" + user = get_user(request) + story_id = request.POST["story_id"] + feed_id = request.POST["feed_id"] + to_addresses = request.POST.get("to", "").replace(",", " ").replace(" ", " ").strip().split(" ") + from_name = request.POST["from_name"] + from_email = request.POST["from_email"] + email_cc = is_true(request.POST.get("email_cc", "true")) + comments = request.POST["comments"] + comments = comments[:2048] # Separated due to PyLint + from_address = "share@newsblur.com" share_user_profile = MSocialProfile.get_user(request.user.pk) - + quota = 32 if user.profile.is_premium else 1 if share_user_profile.over_story_email_quota(quota=quota): code = -1 if user.profile.is_premium: - message = 'You can only send %s stories per day by email.' % quota + message = "You can only send %s stories per day by email." % quota else: - message = 'Upgrade to a premium subscription to send more than one story per day by email.' - logging.user(request, '~BRNOT ~BMSharing story by email to %s recipient, over quota: %s/%s' % - (len(to_addresses), story_id, feed_id)) + message = "Upgrade to a premium subscription to send more than one story per day by email." + logging.user( + request, + "~BRNOT ~BMSharing story by email to %s recipient, over quota: %s/%s" + % (len(to_addresses), story_id, feed_id), + ) elif not to_addresses: code = -1 - message = 'Please provide at least one email address.' + message = "Please provide at least one email address." elif not all(validate_email_as_bool(to_address) for to_address in to_addresses if to_addresses): code = -1 - message = 'You need to send the email to a valid email address.' + message = "You need to send the email to a valid email address." elif not validate_email_as_bool(from_email): code = -1 - message = 'You need to provide your email address.' + message = "You need to provide your email address." elif not from_name: code = -1 - message = 'You need to provide your name.' + message = "You need to provide your name." 
else: story, _ = MStory.find_story(feed_id, story_id) - story = Feed.format_story(story, feed_id, text=True) - feed = Feed.get_by_id(story['story_feed_id']) - params = { + story = Feed.format_story(story, feed_id, text=True) + feed = Feed.get_by_id(story["story_feed_id"]) + params = { "to_addresses": to_addresses, "from_name": from_name, "from_email": from_email, @@ -2830,79 +3081,92 @@ def validate_email_as_bool(email): "feed": feed, "share_user_profile": share_user_profile, } - text = render_to_string('mail/email_story.txt', params) - html = render_to_string('mail/email_story.xhtml', params) - subject = '%s' % (story['story_title']) - cc = None + text = render_to_string("mail/email_story.txt", params) + html = render_to_string("mail/email_story.xhtml", params) + subject = "%s" % (story["story_title"]) + cc = None if email_cc: - cc = ['%s <%s>' % (from_name, from_email)] - subject = subject.replace('\n', ' ') - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % from_address, - to=to_addresses, - cc=cc, - headers={'Reply-To': "%s <%s>" % (from_name, from_email)}) + cc = ["%s <%s>" % (from_name, from_email)] + subject = subject.replace("\n", " ") + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % from_address, + to=to_addresses, + cc=cc, + headers={"Reply-To": "%s <%s>" % (from_name, from_email)}, + ) msg.attach_alternative(html, "text/html") # try: msg.send() # except boto.ses.connection.BotoServerError as e: # code = -1 # message = "Email error: %s" % str(e) - + share_user_profile.save_sent_email() - - logging.user(request, '~BMSharing story by email to %s recipient%s (%s): ~FY~SB%s~SN~BM~FY/~SB%s' % - (len(to_addresses), '' if len(to_addresses) == 1 else 's', to_addresses, - story['story_title'][:50], feed and feed.feed_title[:50])) - - return {'code': code, 'message': message} + + logging.user( + request, + "~BMSharing story by email to %s recipient%s (%s): ~FY~SB%s~SN~BM~FY/~SB%s" + % ( + len(to_addresses), + "" if len(to_addresses) == 1 else "s", + to_addresses, + story["story_title"][:50], + feed and feed.feed_title[:50], + ), + ) + + return {"code": code, "message": message} + @json.json_view def load_tutorial(request): - if request.GET.get('finished'): - logging.user(request, '~BY~FW~SBFinishing Tutorial') + if request.GET.get("finished"): + logging.user(request, "~BY~FW~SBFinishing Tutorial") return {} else: - newsblur_feed = Feed.objects.filter(feed_address__icontains='blog.newsblur.com').order_by('-pk')[0] - logging.user(request, '~BY~FW~SBLoading Tutorial') - return { - 'newsblur_feed': newsblur_feed.canonical() - } + newsblur_feed = Feed.objects.filter(feed_address__icontains="blog.newsblur.com").order_by("-pk")[0] + logging.user(request, "~BY~FW~SBLoading Tutorial") + return {"newsblur_feed": newsblur_feed.canonical()} + -@required_params('query', 'feed_id') +@required_params("query", "feed_id") @json.json_view def save_search(request): - feed_id = request.POST['feed_id'] - query = request.POST['query'] - + feed_id = request.POST["feed_id"] + query = request.POST["query"] + MSavedSearch.save_search(user_id=request.user.pk, feed_id=feed_id, query=query) - + saved_searches = MSavedSearch.user_searches(request.user.pk) - + return { - 'saved_searches': saved_searches, + "saved_searches": saved_searches, } -@required_params('query', 'feed_id') + +@required_params("query", "feed_id") @json.json_view def delete_search(request): - feed_id = request.POST['feed_id'] - query = request.POST['query'] + feed_id = 
request.POST["feed_id"] + query = request.POST["query"] MSavedSearch.delete_search(user_id=request.user.pk, feed_id=feed_id, query=query) saved_searches = MSavedSearch.user_searches(request.user.pk) return { - 'saved_searches': saved_searches, + "saved_searches": saved_searches, } -@required_params('river_id', 'river_side', 'river_order') + +@required_params("river_id", "river_side", "river_order") @json.json_view def save_dashboard_river(request): - river_id = request.POST['river_id'] - river_side = request.POST['river_side'] - river_order = int(request.POST['river_order']) + river_id = request.POST["river_id"] + river_side = request.POST["river_side"] + river_order = int(request.POST["river_order"]) logging.user(request, "~FCSaving dashboard river: ~SB%s~SN (%s %s)" % (river_id, river_side, river_order)) @@ -2910,21 +3174,24 @@ def save_dashboard_river(request): dashboard_rivers = MDashboardRiver.get_user_rivers(request.user.pk) return { - 'dashboard_rivers': dashboard_rivers, + "dashboard_rivers": dashboard_rivers, } -@required_params('river_id', 'river_side', 'river_order') + +@required_params("river_id", "river_side", "river_order") @json.json_view def remove_dashboard_river(request): - river_id = request.POST['river_id'] - river_side = request.POST['river_side'] - river_order = int(request.POST['river_order']) + river_id = request.POST["river_id"] + river_side = request.POST["river_side"] + river_order = int(request.POST["river_order"]) - logging.user(request, "~FRRemoving~FC dashboard river: ~SB%s~SN (%s %s)" % (river_id, river_side, river_order)) + logging.user( + request, "~FRRemoving~FC dashboard river: ~SB%s~SN (%s %s)" % (river_id, river_side, river_order) + ) MDashboardRiver.remove_river(request.user.pk, river_side, river_order) dashboard_rivers = MDashboardRiver.get_user_rivers(request.user.pk) return { - 'dashboard_rivers': dashboard_rivers, + "dashboard_rivers": dashboard_rivers, } diff --git a/apps/recommendations/migrations/0001_initial.py b/apps/recommendations/migrations/0001_initial.py index 5d1623cf9f..24217a40d1 100644 --- a/apps/recommendations/migrations/0001_initial.py +++ b/apps/recommendations/migrations/0001_initial.py @@ -6,40 +6,73 @@ class Migration(migrations.Migration): - initial = True dependencies = [ - ('rss_feeds', '0001_initial'), + ("rss_feeds", "0001_initial"), migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( - name='RecommendedFeed', + name="RecommendedFeed", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('description', models.TextField(blank=True, null=True)), - ('is_public', models.BooleanField(default=False)), - ('created_date', models.DateField(auto_now_add=True)), - ('approved_date', models.DateField(null=True)), - ('declined_date', models.DateField(null=True)), - ('twitter', models.CharField(blank=True, max_length=50, null=True)), - ('feed', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='recommendations', to='rss_feeds.Feed')), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='recommendations', to=settings.AUTH_USER_MODEL)), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("description", models.TextField(blank=True, null=True)), + ("is_public", models.BooleanField(default=False)), + ("created_date", models.DateField(auto_now_add=True)), + ("approved_date", models.DateField(null=True)), + 
("declined_date", models.DateField(null=True)), + ("twitter", models.CharField(blank=True, max_length=50, null=True)), + ( + "feed", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="recommendations", + to="rss_feeds.Feed", + ), + ), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="recommendations", + to=settings.AUTH_USER_MODEL, + ), + ), ], options={ - 'ordering': ['-approved_date', '-created_date'], + "ordering": ["-approved_date", "-created_date"], }, ), migrations.CreateModel( - name='RecommendedFeedUserFeedback', + name="RecommendedFeedUserFeedback", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('score', models.IntegerField(default=0)), - ('created_date', models.DateField(auto_now_add=True)), - ('recommendation', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='feedback', to='recommendations.RecommendedFeed')), - ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='feed_feedback', to=settings.AUTH_USER_MODEL)), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("score", models.IntegerField(default=0)), + ("created_date", models.DateField(auto_now_add=True)), + ( + "recommendation", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="feedback", + to="recommendations.RecommendedFeed", + ), + ), + ( + "user", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="feed_feedback", + to=settings.AUTH_USER_MODEL, + ), + ), ], ), ] diff --git a/apps/recommendations/models.py b/apps/recommendations/models.py index 5914fed7a0..43976f11b5 100644 --- a/apps/recommendations/models.py +++ b/apps/recommendations/models.py @@ -6,67 +6,69 @@ from utils import json_functions as json from collections import defaultdict + class RecommendedFeed(models.Model): - feed = models.ForeignKey(Feed, related_name='recommendations', on_delete=models.CASCADE) - user = models.ForeignKey(User, related_name='recommendations', on_delete=models.CASCADE) - description = models.TextField(null=True, blank=True) - is_public = models.BooleanField(default=False) - created_date = models.DateField(auto_now_add=True) + feed = models.ForeignKey(Feed, related_name="recommendations", on_delete=models.CASCADE) + user = models.ForeignKey(User, related_name="recommendations", on_delete=models.CASCADE) + description = models.TextField(null=True, blank=True) + is_public = models.BooleanField(default=False) + created_date = models.DateField(auto_now_add=True) approved_date = models.DateField(null=True) declined_date = models.DateField(null=True) - twitter = models.CharField(max_length=50, null=True, blank=True) - + twitter = models.CharField(max_length=50, null=True, blank=True) + def __str__(self): return "%s (%s)" % (self.feed, self.approved_date or self.created_date) - + class Meta: - ordering = ['-approved_date', '-created_date'] + ordering = ["-approved_date", "-created_date"] class RecommendedFeedUserFeedback(models.Model): - recommendation = models.ForeignKey(RecommendedFeed, related_name='feedback', on_delete=models.CASCADE) - user = models.ForeignKey(User, related_name='feed_feedback', on_delete=models.CASCADE) - score = models.IntegerField(default=0) - created_date = models.DateField(auto_now_add=True) + recommendation = models.ForeignKey(RecommendedFeed, related_name="feedback", 
on_delete=models.CASCADE) + user = models.ForeignKey(User, related_name="feed_feedback", on_delete=models.CASCADE) + score = models.IntegerField(default=0) + created_date = models.DateField(auto_now_add=True) + class MFeedFolder(mongo.Document): feed_id = mongo.IntField() folder = mongo.StringField() count = mongo.IntField() - + meta = { - 'collection': 'feed_folders', - 'indexes': ['feed_id', 'folder'], - 'allow_inheritance': False, + "collection": "feed_folders", + "indexes": ["feed_id", "folder"], + "allow_inheritance": False, } - + def __str__(self): feed = Feed.get_by_id(self.feed_id) return "%s - %s (%s)" % (feed, self.folder, self.count) - + @classmethod def count_feed(cls, feed_id): feed = Feed.get_by_id(feed_id) print(feed) found_folders = defaultdict(int) - user_ids = [sub['user_id'] for sub in UserSubscription.objects.filter(feed=feed).values('user_id')] + user_ids = [sub["user_id"] for sub in UserSubscription.objects.filter(feed=feed).values("user_id")] usf = UserSubscriptionFolders.objects.filter(user_id__in=user_ids) for sub in usf: user_sub_folders = json.decode(sub.folders) folder_title = cls.feed_folder_parent(user_sub_folders, feed.pk) - if not folder_title: continue + if not folder_title: + continue found_folders[folder_title.lower()] += 1 # print "%-20s - %s" % (folder_title if folder_title != '' else '[Top]', sub.user_id) print(sorted(list(found_folders.items()), key=lambda f: f[1], reverse=True)) - - + @classmethod - def feed_folder_parent(cls, folders, feed_id, folder_title=''): + def feed_folder_parent(cls, folders, feed_id, folder_title=""): for item in folders: if isinstance(item, int) and item == feed_id: return folder_title elif isinstance(item, dict): for f_k, f_v in list(item.items()): sub_folder_title = cls.feed_folder_parent(f_v, feed_id, f_k) - if sub_folder_title: + if sub_folder_title: return sub_folder_title diff --git a/apps/recommendations/templatetags/recommendations_tags.py b/apps/recommendations/templatetags/recommendations_tags.py index 3978381de8..acd3d41fe8 100644 --- a/apps/recommendations/templatetags/recommendations_tags.py +++ b/apps/recommendations/templatetags/recommendations_tags.py @@ -7,25 +7,25 @@ register = template.Library() -@register.inclusion_tag('recommendations/render_recommended_feed.xhtml', takes_context=True) + +@register.inclusion_tag("recommendations/render_recommended_feed.xhtml", takes_context=True) def render_recommended_feed(context, recommended_feeds, unmoderated=False): - user = get_user(context['user']) - + user = get_user(context["user"]) + usersub = None - if context['user'].is_authenticated: + if context["user"].is_authenticated: usersub = UserSubscription.objects.filter(user=user, feed=recommended_feeds[0].feed) recommended_feed = recommended_feeds and recommended_feeds[0] feed_icon = MFeedIcon.objects(feed_id=recommended_feed.feed_id) - + if recommended_feed: return { - 'recommended_feed' : recommended_feed, - 'description' : recommended_feed.description or recommended_feed.feed.data.feed_tagline, - 'usersub' : usersub, - 'feed_icon' : feed_icon and feed_icon[0], - 'user' : context['user'], - 'has_next_page' : len(recommended_feeds) > 1, - 'unmoderated' : unmoderated, - 'today' : datetime.datetime.now(), + "recommended_feed": recommended_feed, + "description": recommended_feed.description or recommended_feed.feed.data.feed_tagline, + "usersub": usersub, + "feed_icon": feed_icon and feed_icon[0], + "user": context["user"], + "has_next_page": len(recommended_feeds) > 1, + "unmoderated": unmoderated, + "today": 
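For context on MFeedFolder.feed_folder_parent above: user folders are stored as a nested list mixing bare feed ids with {folder_title: children} dicts. A worked example (values invented):

    folders = [1, 2, {"Tech": [3, 4, {"Apple": [5]}]}, 6]

    # MFeedFolder.feed_folder_parent(folders, 5)  ->  "Apple"
    # MFeedFolder.feed_folder_parent(folders, 6)  ->  ""   (top level)
    # Note the top-level result is the falsy "", which count_feed() then
    # skips via its "if not folder_title: continue" guard.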
datetime.datetime.now(), } - \ No newline at end of file diff --git a/apps/recommendations/tests.py b/apps/recommendations/tests.py index c7c4668e12..f51d798ffd 100644 --- a/apps/recommendations/tests.py +++ b/apps/recommendations/tests.py @@ -7,6 +7,7 @@ from django.test import TestCase + class SimpleTest(TestCase): def test_basic_addition(self): """ @@ -14,10 +15,12 @@ def test_basic_addition(self): """ self.assertEqual(1 + 1, 2) -__test__ = {"doctest": """ + +__test__ = { + "doctest": """ Another way to test that 1 + 1 is equal to 2. >>> 1 + 1 == 2 True -"""} - +""" +} diff --git a/apps/recommendations/urls.py b/apps/recommendations/urls.py index 481b618e29..9a61b3d5d0 100644 --- a/apps/recommendations/urls.py +++ b/apps/recommendations/urls.py @@ -2,9 +2,9 @@ from apps.recommendations import views urlpatterns = [ - url(r'^load_recommended_feed', views.load_recommended_feed, name='load-recommended-feed'), - url(r'^save_recommended_feed', views.save_recommended_feed, name='save-recommended-feed'), - url(r'^approve_feed', views.approve_feed, name='approve-recommended-feed'), - url(r'^decline_feed', views.decline_feed, name='decline-recommended-feed'), - url(r'^load_feed_info/(?P\d+)', views.load_feed_info, name='load-recommended-feed-info'), + url(r"^load_recommended_feed", views.load_recommended_feed, name="load-recommended-feed"), + url(r"^save_recommended_feed", views.save_recommended_feed, name="save-recommended-feed"), + url(r"^approve_feed", views.approve_feed, name="approve-recommended-feed"), + url(r"^decline_feed", views.decline_feed, name="decline-recommended-feed"), + url(r"^load_feed_info/(?P\d+)", views.load_feed_info, name="load-recommended-feed-info"), ] diff --git a/apps/recommendations/views.py b/apps/recommendations/views.py index ae9ac60654..dabdbd9c1a 100644 --- a/apps/recommendations/views.py +++ b/apps/recommendations/views.py @@ -11,43 +11,52 @@ def load_recommended_feed(request): - user = get_user(request) - page = max(int(request.GET.get('page', 0)), 0) - usersub = None - refresh = request.GET.get('refresh') - now = datetime.datetime.now() - unmoderated = request.GET.get('unmoderated', False) == 'true' - + user = get_user(request) + page = max(int(request.GET.get("page", 0)), 0) + usersub = None + refresh = request.GET.get("refresh") + now = datetime.datetime.now() + unmoderated = request.GET.get("unmoderated", False) == "true" + if unmoderated: - recommended_feeds = RecommendedFeed.objects.filter(is_public=False, declined_date__isnull=True)[page:page+2] + recommended_feeds = RecommendedFeed.objects.filter(is_public=False, declined_date__isnull=True)[ + page : page + 2 + ] else: - recommended_feeds = RecommendedFeed.objects.filter(is_public=True, approved_date__lte=now)[page:page+2] + recommended_feeds = RecommendedFeed.objects.filter(is_public=True, approved_date__lte=now)[ + page : page + 2 + ] if recommended_feeds and request.user.is_authenticated: usersub = UserSubscription.objects.filter(user=user, feed=recommended_feeds[0].feed) - if refresh != 'true' and page > 0: - logging.user(request, "~FBBrowse recommended feed: ~SBPage #%s" % (page+1)) - + if refresh != "true" and page > 0: + logging.user(request, "~FBBrowse recommended feed: ~SBPage #%s" % (page + 1)) + recommended_feed = recommended_feeds and recommended_feeds[0] if not recommended_feeds: return HttpResponse("") - + feed_icon = MFeedIcon.objects(feed_id=recommended_feed.feed_id) - + if recommended_feed: - return render(request, 'recommendations/render_recommended_feed.xhtml', { - 'recommended_feed' : 
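The two-row slice in load_recommended_feed below is a lookahead: only the first row is rendered, and the mere presence of a second row drives has_next_page. The same trick in isolation:

    rows = list(range(10))          # stand-in for the RecommendedFeed queryset
    page = 3
    window = rows[page : page + 2]  # fetch the current row plus one lookahead
    current = window and window[0]
    has_next_page = len(window) > 1
    has_previous_page = page != 0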
recommended_feed, - 'description' : recommended_feed.description or recommended_feed.feed.data.feed_tagline, - 'usersub' : usersub, - 'feed_icon' : feed_icon and feed_icon[0], - 'has_next_page' : len(recommended_feeds) > 1, - 'has_previous_page' : page != 0, - 'unmoderated' : unmoderated, - 'today' : datetime.datetime.now(), - 'page' : page, - }) + return render( + request, + "recommendations/render_recommended_feed.xhtml", + { + "recommended_feed": recommended_feed, + "description": recommended_feed.description or recommended_feed.feed.data.feed_tagline, + "usersub": usersub, + "feed_icon": feed_icon and feed_icon[0], + "has_next_page": len(recommended_feeds) > 1, + "has_previous_page": page != 0, + "unmoderated": unmoderated, + "today": datetime.datetime.now(), + "page": page, + }, + ) else: return HttpResponse("") - + + @json.json_view def load_feed_info(request, feed_id): feed = get_object_or_404(Feed, pk=feed_id) @@ -56,58 +65,56 @@ def load_feed_info(request, feed_id): recommended_feed = RecommendedFeed.objects.filter(user=request.user, feed=feed) if recommended_feed: previous_recommendation = recommended_feed[0].created_date - + return { - 'num_subscribers': feed.num_subscribers, - 'tagline': feed.data.feed_tagline, - 'previous_recommendation': previous_recommendation + "num_subscribers": feed.num_subscribers, + "tagline": feed.data.feed_tagline, + "previous_recommendation": previous_recommendation, } - + + @ajax_login_required @json.json_view def save_recommended_feed(request): - feed_id = request.POST['feed_id'] - feed = get_object_or_404(Feed, pk=int(feed_id)) - tagline = request.POST['tagline'] - twitter = request.POST.get('twitter') - code = 1 - + feed_id = request.POST["feed_id"] + feed = get_object_or_404(Feed, pk=int(feed_id)) + tagline = request.POST["tagline"] + twitter = request.POST.get("twitter") + code = 1 + recommended_feed, created = RecommendedFeed.objects.get_or_create( - feed=feed, - user=request.user, - defaults=dict( - description=tagline, - twitter=twitter - ) + feed=feed, user=request.user, defaults=dict(description=tagline, twitter=twitter) ) return dict(code=code if created else -1) - + + @admin_only @ajax_login_required def approve_feed(request): - feed_id = request.POST['feed_id'] - feed = get_object_or_404(Feed, pk=int(feed_id)) - date = request.POST['date'] + feed_id = request.POST["feed_id"] + feed = get_object_or_404(Feed, pk=int(feed_id)) + date = request.POST["date"] recommended_feed = RecommendedFeed.objects.filter(feed=feed)[0] - - year, month, day = re.search(r'(\d{4})-(\d{1,2})-(\d{1,2})', date).groups() + + year, month, day = re.search(r"(\d{4})-(\d{1,2})-(\d{1,2})", date).groups() recommended_feed.is_public = True recommended_feed.approved_date = datetime.date(int(year), int(month), int(day)) recommended_feed.save() - + return load_recommended_feed(request) + @admin_only @ajax_login_required def decline_feed(request): - feed_id = request.GET['feed_id'] - feed = get_object_or_404(Feed, pk=int(feed_id)) + feed_id = request.GET["feed_id"] + feed = get_object_or_404(Feed, pk=int(feed_id)) recommended_feeds = RecommendedFeed.objects.filter(feed=feed) - + for recommended_feed in recommended_feeds: recommended_feed.is_public = False recommended_feed.declined_date = datetime.datetime.now() recommended_feed.save() - + return load_recommended_feed(request) diff --git a/apps/rss_feeds/factories.py b/apps/rss_feeds/factories.py index 74abbdcc7e..c6d6c0d622 100644 --- a/apps/rss_feeds/factories.py +++ b/apps/rss_feeds/factories.py @@ -8,24 +8,27 @@ 
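approve_feed above pulls the approval date out of free-form text with a regex rather than strptime, so surrounding characters are tolerated. An equivalent standalone sketch:

    import datetime
    import re

    def parse_approval_date(text):
        year, month, day = re.search(r"(\d{4})-(\d{1,2})-(\d{1,2})", text).groups()
        return datetime.date(int(year), int(month), int(day))

    parse_approval_date("approved on 2023-10-5")  # -> datetime.date(2023, 10, 5)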
NEWSBLUR_DIR = settings.NEWSBLUR_DIR fake = Faker() + def generate_address(): return f"{NEWSBLUR_DIR}/apps/analyzer/fixtures/{fake.word()}.xml" + class FeedFactory(DjangoModelFactory): feed_address = FuzzyAttribute(generate_address) feed_link = FuzzyAttribute(generate_address) - creation = factory.Faker('date') - feed_title = factory.Faker('sentence') - last_update = factory.Faker('date_time') - next_scheduled_update = factory.Faker('date_time') - last_story_date = factory.Faker('date_time') + creation = factory.Faker("date") + feed_title = factory.Faker("sentence") + last_update = factory.Faker("date_time") + next_scheduled_update = factory.Faker("date_time") + last_story_date = factory.Faker("date_time") min_to_decay = 1 - last_modified = factory.Faker('date_time') + last_modified = factory.Faker("date_time") hash_address_and_link = fake.sha1() class Meta: model = Feed + class DuplicateFeedFactory(DjangoModelFactory): class Meta: - model = DuplicateFeed \ No newline at end of file + model = DuplicateFeed diff --git a/apps/rss_feeds/icon_importer.py b/apps/rss_feeds/icon_importer.py index 95108b55c4..9db42f1a4e 100644 --- a/apps/rss_feeds/icon_importer.py +++ b/apps/rss_feeds/icon_importer.py @@ -33,7 +33,6 @@ class IconImporter(object): - def __init__(self, feed, page_data=None, force=False): self.feed = feed self.force = force @@ -45,27 +44,27 @@ def save(self): # print 'Not found, skipping...' return if ( - not self.force - and not self.feed.favicon_not_found - and self.feed_icon.icon_url - and self.feed.s3_icon + not self.force + and not self.feed.favicon_not_found + and self.feed_icon.icon_url + and self.feed.s3_icon ): # print 'Found, but skipping...' return - if 'facebook.com' in self.feed.feed_address: + if "facebook.com" in self.feed.feed_address: image, image_file, icon_url = self.fetch_facebook_image() else: image, image_file, icon_url = self.fetch_image_from_page_data() if not image: image, image_file, icon_url = self.fetch_image_from_path(force=self.force) - + if not image: self.feed_icon.not_found = True self.feed_icon.save() self.feed.favicon_not_found = True self.feed.save() return False - + image = self.normalize_image(image) try: color = self.determine_dominant_color_in_image(image) @@ -79,49 +78,53 @@ def save(self): if len(image_str) > 500000: image = None - if (image and - (self.force or - self.feed_icon.data != image_str or - self.feed_icon.icon_url != icon_url or - self.feed_icon.not_found or - (settings.BACKED_BY_AWS.get('icons_on_s3') and not self.feed.s3_icon))): - logging.debug(" ---> [%-30s] ~SN~FBIcon difference:~FY color:%s (%s/%s) data:%s url:%s notfound:%s no-s3:%s" % ( - self.feed.log_title[:30], - self.feed_icon.color != color, self.feed_icon.color, color, - self.feed_icon.data != image_str, - self.feed_icon.icon_url != icon_url, - self.feed_icon.not_found, - settings.BACKED_BY_AWS.get('icons_on_s3') and not self.feed.s3_icon)) + if image and ( + self.force + or self.feed_icon.data != image_str + or self.feed_icon.icon_url != icon_url + or self.feed_icon.not_found + or (settings.BACKED_BY_AWS.get("icons_on_s3") and not self.feed.s3_icon) + ): + logging.debug( + " ---> [%-30s] ~SN~FBIcon difference:~FY color:%s (%s/%s) data:%s url:%s notfound:%s no-s3:%s" + % ( + self.feed.log_title[:30], + self.feed_icon.color != color, + self.feed_icon.color, + color, + self.feed_icon.data != image_str, + self.feed_icon.icon_url != icon_url, + self.feed_icon.not_found, + settings.BACKED_BY_AWS.get("icons_on_s3") and not self.feed.s3_icon, + ) + ) self.feed_icon.data = 
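On FeedFactory above: factory_boy's FuzzyAttribute wraps a zero-argument callable that is invoked once per generated object, so every Feed gets a fresh fake address. By contrast, hash_address_and_link = fake.sha1() is evaluated once at class-definition time and shared by every instance, which matters when the column is unique. A minimal sketch (the dict model is a stand-in):

    import factory
    from factory.fuzzy import FuzzyAttribute
    from faker import Faker

    fake = Faker()

    class ExampleFactory(factory.Factory):
        class Meta:
            model = dict  # stand-in; the real factory targets Feed

        feed_address = FuzzyAttribute(lambda: f"/fixtures/{fake.word()}.xml")

    ExampleFactory()  # {'feed_address': '/fixtures/<fresh random word>.xml'}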
image_str self.feed_icon.icon_url = icon_url self.feed_icon.color = color self.feed_icon.not_found = False self.feed_icon.save() - if settings.BACKED_BY_AWS.get('icons_on_s3'): + if settings.BACKED_BY_AWS.get("icons_on_s3"): self.save_to_s3(image_str) if self.feed.favicon_color != color: self.feed.favicon_color = color self.feed.favicon_not_found = False - self.feed.save(update_fields=['favicon_color', 'favicon_not_found']) - + self.feed.save(update_fields=["favicon_color", "favicon_not_found"]) + return not self.feed.favicon_not_found def save_to_s3(self, image_str): expires = datetime.datetime.now() + datetime.timedelta(days=60) expires = expires.strftime("%a, %d %b %Y %H:%M:%S GMT") base64.b64decode(image_str) - settings.S3_CONN.Object(settings.S3_ICONS_BUCKET_NAME, - self.feed.s3_icons_key).put(Body=base64.b64decode(image_str), - ContentType='image/png', - Expires=expires, - ACL='public-read' - ) + settings.S3_CONN.Object(settings.S3_ICONS_BUCKET_NAME, self.feed.s3_icons_key).put( + Body=base64.b64decode(image_str), ContentType="image/png", Expires=expires, ACL="public-read" + ) self.feed.s3_icon = True self.feed.save() def load_icon(self, image_file, index=None): - ''' + """ DEPRECATED Load Windows ICO image. @@ -130,10 +133,10 @@ def load_icon(self, image_file, index=None): description. Cribbed and modified from http://djangosnippets.org/snippets/1287/ - ''' + """ try: image_file.seek(0) - header = struct.unpack('<3H', image_file.read(6)) + header = struct.unpack("<3H", image_file.read(6)) except Exception: return @@ -144,7 +147,7 @@ def load_icon(self, image_file, index=None): # Collect icon directories directories = [] for i in range(header[2]): - directory = list(struct.unpack('<4B2H2I', image_file.read(16))) + directory = list(struct.unpack("<4B2H2I", image_file.read(16))) for j in range(3): if not directory[j]: directory[j] = 256 @@ -175,7 +178,7 @@ def load_icon(self, image_file, index=None): image = BmpImagePlugin.DibImageFile(image_file) except IOError: return - if image.mode == 'RGBA': + if image.mode == "RGBA": # Windows XP 32-bit color depth icon without AND bitmap pass else: @@ -194,10 +197,9 @@ def load_icon(self, image_file, index=None): # Load AND bitmap image_file.seek(offset) string = image_file.read(size) - mask = Image.frombytes('1', image.size, string, 'raw', - ('1;I', stride, -1)) + mask = Image.frombytes("1", image.size, string, "raw", ("1;I", stride, -1)) - image = image.convert('RGBA') + image = image.convert("RGBA") image.putalpha(mask) return image @@ -208,7 +210,7 @@ def fetch_image_from_page_data(self): content = None if self.page_data: content = self.page_data - elif settings.BACKED_BY_AWS.get('pages_on_node'): + elif settings.BACKED_BY_AWS.get("pages_on_node"): domain = "node-page.service.consul:8008" if settings.DOCKERBUILD: domain = "node:8008" @@ -222,7 +224,7 @@ def fetch_image_from_page_data(self): content = page_response.content except requests.ConnectionError: pass - elif settings.BACKED_BY_AWS.get('pages_on_s3') and self.feed.s3_page: + elif settings.BACKED_BY_AWS.get("pages_on_s3") and self.feed.s3_page: key = settings.S3_CONN.Bucket(settings.S3_PAGES_BUCKET_NAME).Object(key=self.feed.s3_pages_key) compressed_content = key.get()["Body"].read() stream = BytesIO(compressed_content) @@ -238,28 +240,35 @@ def fetch_image_from_page_data(self): try: content = requests.get(self.cleaned_feed_link, timeout=10).content url = self._url_from_html(content) - except (AttributeError, SocketError, requests.ConnectionError, - requests.models.MissingSchema, 
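save_to_s3 above uses the boto3 resource API: Object(bucket, key).put uploads the decoded PNG with a public-read ACL and a far-future Expires header. A standalone sketch (bucket name is illustrative):

    import base64
    import datetime

    import boto3

    def upload_icon(image_b64, key, bucket="example-icons-bucket"):
        s3 = boto3.resource("s3")
        expires = datetime.datetime.utcnow() + datetime.timedelta(days=60)
        s3.Object(bucket, key).put(
            Body=base64.b64decode(image_b64),
            ContentType="image/png",
            Expires=expires,
            ACL="public-read",
        )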
requests.sessions.InvalidSchema, - requests.sessions.TooManyRedirects, - requests.models.InvalidURL, - requests.models.ChunkedEncodingError, - requests.models.ContentDecodingError, - http.client.IncompleteRead, - requests.adapters.ReadTimeout, - LocationParseError, OpenSSLError, PyAsn1Error, - ValueError) as e: + except ( + AttributeError, + SocketError, + requests.ConnectionError, + requests.models.MissingSchema, + requests.sessions.InvalidSchema, + requests.sessions.TooManyRedirects, + requests.models.InvalidURL, + requests.models.ChunkedEncodingError, + requests.models.ContentDecodingError, + http.client.IncompleteRead, + requests.adapters.ReadTimeout, + LocationParseError, + OpenSSLError, + PyAsn1Error, + ValueError, + ) as e: logging.debug(" ---> ~SN~FRFailed~FY to fetch ~FGfeed icon~FY: %s" % e) if url: image, image_file = self.get_image_from_url(url) return image, image_file, url - + @property def cleaned_feed_link(self): - if self.feed.feed_link.startswith('http'): + if self.feed.feed_link.startswith("http"): return self.feed.feed_link - return 'http://' + self.feed.feed_link - - def fetch_image_from_path(self, path='favicon.ico', force=False): + return "http://" + self.feed.feed_link + + def fetch_image_from_path(self, path="favicon.ico", force=False): image = None url = None @@ -267,7 +276,7 @@ def fetch_image_from_path(self, path='favicon.ico', force=False): url = self.feed_icon.icon_url if not url and self.feed.feed_link and len(self.feed.feed_link) > 6: try: - url = urllib.parse.urljoin(self.feed.feed_link, 'favicon.ico') + url = urllib.parse.urljoin(self.feed.feed_link, "favicon.ico") except ValueError: url = None if not url: @@ -275,21 +284,21 @@ def fetch_image_from_path(self, path='favicon.ico', force=False): image, image_file = self.get_image_from_url(url) if not image: - url = urllib.parse.urljoin(self.feed.feed_link, '/favicon.ico') + url = urllib.parse.urljoin(self.feed.feed_link, "/favicon.ico") image, image_file = self.get_image_from_url(url) # print 'Found: %s - %s' % (url, image) return image, image_file, url - + def fetch_facebook_image(self): facebook_fetcher = FacebookFetcher(self.feed) url = facebook_fetcher.favicon_url() image, image_file = self.get_image_from_url(url) if not image: - url = urllib.parse.urljoin(self.feed.feed_link, '/favicon.ico') + url = urllib.parse.urljoin(self.feed.feed_link, "/favicon.ico") image, image_file = self.get_image_from_url(url) # print 'Found: %s - %s' % (url, image) return image, image_file, url - + def get_image_from_url(self, url): # print 'Requesting: %s' % url if not url: @@ -298,15 +307,15 @@ def get_image_from_url(self, url): @timelimit(30) def _1(url): headers = { - 'User-Agent': 'NewsBlur Favicon Fetcher - %s subscriber%s - %s %s' % - ( - self.feed.num_subscribers, - 's' if self.feed.num_subscribers != 1 else '', - self.feed.permalink, - self.feed.fake_user_agent, - ), - 'Connection': 'close', - 'Accept': 'image/png,image/x-icon,image/*;q=0.9,*/*;q=0.8' + "User-Agent": "NewsBlur Favicon Fetcher - %s subscriber%s - %s %s" + % ( + self.feed.num_subscribers, + "s" if self.feed.num_subscribers != 1 else "", + self.feed.permalink, + self.feed.fake_user_agent, + ), + "Connection": "close", + "Accept": "image/png,image/x-icon,image/*;q=0.9,*/*;q=0.8", } try: request = urllib.request.Request(url, headers=headers) @@ -314,6 +323,7 @@ def _1(url): except Exception: return None return icon + try: icon = _1(url) except TimeoutError: @@ -333,7 +343,7 @@ def _url_from_html(self, content): return url try: if isinstance(content, str): 
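The two-step favicon fallback in fetch_image_from_path leans on urljoin semantics: a bare relative name resolves against the feed link's path, while a leading slash resolves against the site root.

    from urllib.parse import urljoin

    urljoin("http://example.com/blog/", "favicon.ico")   # -> http://example.com/blog/favicon.ico
    urljoin("http://example.com/blog/", "/favicon.ico")  # -> http://example.com/favicon.ico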
- content = content.encode('utf-8') + content = content.encode("utf-8") icon_path = lxml.html.fromstring(content).xpath( '//link[@rel="icon" or @rel="shortcut icon"]/@href' ) @@ -341,7 +351,7 @@ def _url_from_html(self, content): return url if icon_path: - if str(icon_path[0]).startswith('http'): + if str(icon_path[0]).startswith("http"): url = icon_path[0] else: url = urllib.parse.urljoin(self.feed.feed_link, icon_path[0]) @@ -350,9 +360,9 @@ def _url_from_html(self, content): def normalize_image(self, image): # if image.size != (16, 16): # image = image.resize((16, 16), Image.BICUBIC) - if image.mode != 'RGBA': + if image.mode != "RGBA": try: - image = image.convert('RGBA') + image = image.convert("RGBA") except IOError: pass @@ -362,8 +372,8 @@ def determine_dominant_color_in_image(self, image): NUM_CLUSTERS = 5 # Convert image into array of values for each point. - if image.mode == '1': - image.convert('L') + if image.mode == "1": + image.convert("L") ar = numpy.array(image) # ar = scipy.misc.fromimage(image) shape = ar.shape @@ -371,7 +381,7 @@ def determine_dominant_color_in_image(self, image): # Reshape array of values to merge color bands. [[R], [G], [B], [A]] => [R, G, B, A] if len(shape) > 2: ar = ar.reshape(scipy.product(shape[:2]), shape[2]) - + # Get NUM_CLUSTERS worth of centroids. ar = ar.astype(numpy.float) codes, _ = scipy.cluster.vq.kmeans(ar, NUM_CLUSTERS) @@ -379,9 +389,16 @@ def determine_dominant_color_in_image(self, image): # Pare centroids, removing blacks and whites and shades of really dark and really light. original_codes = codes for low, hi in [(60, 200), (35, 230), (10, 250)]: - codes = scipy.array([code for code in codes - if not ((code[0] < low and code[1] < low and code[2] < low) or - (code[0] > hi and code[1] > hi and code[2] > hi))]) + codes = scipy.array( + [ + code + for code in codes + if not ( + (code[0] < low and code[1] < low and code[2] < low) + or (code[0] > hi and code[1] > hi and code[2] > hi) + ) + ] + ) if not len(codes): codes = original_codes else: @@ -409,7 +426,7 @@ def determine_dominant_color_in_image(self, image): def string_from_image(self, image): output = BytesIO() - image.save(output, 'png', quality=95) + image.save(output, "png", quality=95) contents = output.getvalue() output.close() return base64.b64encode(contents).decode() diff --git a/apps/rss_feeds/management/commands/calculate_scores.py b/apps/rss_feeds/management/commands/calculate_scores.py index 8f914f28c4..7e371554ad 100644 --- a/apps/rss_feeds/management/commands/calculate_scores.py +++ b/apps/rss_feeds/management/commands/calculate_scores.py @@ -7,55 +7,67 @@ import re import datetime -class Command(BaseCommand): +class Command(BaseCommand): def add_arguments(self, parser): - parser.add_argument("-a", "--all", dest="all", action="store_true", help="All feeds, need it or not (can be combined with a user)"), - parser.add_argument("-s", "--silent", dest="silent", default=False, action="store_true", help="Inverse verbosity."), + parser.add_argument( + "-a", + "--all", + dest="all", + action="store_true", + help="All feeds, need it or not (can be combined with a user)", + ), + parser.add_argument( + "-s", "--silent", dest="silent", default=False, action="store_true", help="Inverse verbosity." 
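The gist of determine_dominant_color_in_image above, minus its near-black/near-white centroid filtering: flatten the image to (N, 4) rows of RGBA values, k-means them into 5 centroids, and keep the centroid that owns the most pixels. A sketch using the same scipy.cluster.vq calls, with numpy used directly where the original leans on deprecated scipy aliases:

    import numpy
    import scipy.cluster.vq
    from PIL import Image

    def dominant_color(image, num_clusters=5):
        ar = numpy.asarray(image.convert("RGBA"), dtype=float)
        ar = ar.reshape(-1, ar.shape[-1])          # (N, 4) rows of RGBA
        codes, _ = scipy.cluster.vq.kmeans(ar, num_clusters)
        vecs, _ = scipy.cluster.vq.vq(ar, codes)   # nearest centroid per pixel
        counts = numpy.bincount(vecs)
        return codes[counts.argmax()]              # most common centroid (RGBA)

    # dominant_color(Image.open("favicon.png"))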
+ ), parser.add_argument("-u", "--user", dest="user", nargs=1, help="Specify user id or username"), parser.add_argument("-d", "--daemon", dest="daemonize", action="store_true"), - parser.add_argument("-D", "--days", dest="days", nargs=1, default=1, type='int'), - parser.add_argument("-O", "--offset", dest="offset", nargs=1, default=0, type='int'), + parser.add_argument("-D", "--days", dest="days", nargs=1, default=1, type="int"), + parser.add_argument("-O", "--offset", dest="offset", nargs=1, default=0, type="int"), def handle(self, *args, **options): settings.LOG_TO_STREAM = True - if options['daemonize']: + if options["daemonize"]: daemonize() - if options['user']: - if re.match(r"([0-9]+)", options['user']): - users = User.objects.filter(pk=int(options['user'])) + if options["user"]: + if re.match(r"([0-9]+)", options["user"]): + users = User.objects.filter(pk=int(options["user"])) else: - users = User.objects.filter(username=options['user']) + users = User.objects.filter(username=options["user"]) else: - users = User.objects.filter(profile__last_seen_on__gte=datetime.datetime.now()-datetime.timedelta(days=options['days'])).order_by('pk') - + users = User.objects.filter( + profile__last_seen_on__gte=datetime.datetime.now() - datetime.timedelta(days=options["days"]) + ).order_by("pk") + user_count = users.count() for i, u in enumerate(users): - if i < options['offset']: continue - if options['all']: + if i < options["offset"]: + continue + if options["all"]: usersubs = UserSubscription.objects.filter(user=u, active=True) else: usersubs = UserSubscription.objects.filter(user=u, needs_unread_recalc=True) - print((" ---> %s has %s feeds (%s/%s)" % (u.username, usersubs.count(), i+1, user_count))) + print((" ---> %s has %s feeds (%s/%s)" % (u.username, usersubs.count(), i + 1, user_count))) for sub in usersubs: try: - sub.calculate_feed_scores(silent=options['silent']) + sub.calculate_feed_scores(silent=options["silent"]) except Exception as e: print((" ***> Exception: %s" % e)) continue - + + def daemonize(): """ Detach from the terminal and continue as a daemon. """ # swiped from twisted/scripts/twistd.py # See http://www.erlenstar.demon.co.uk/unix/faq_toc.html#TOC16 - if os.fork(): # launch child and... - os._exit(0) # kill off parent + if os.fork(): # launch child and... + os._exit(0) # kill off parent os.setsid() - if os.fork(): # launch child and... - os._exit(0) # kill off parent again. + if os.fork(): # launch child and... + os._exit(0) # kill off parent again. 
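    # Why daemonize() forks twice: after the first fork the child is not a
    # process-group leader, so setsid() can start a new session with no
    # controlling terminal; the second fork ensures the daemon is no longer a
    # session leader and so can never reacquire a terminal. The code below
    # then tightens the umask and points fds 0-2 at /dev/null.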
os.umask(0o77) null = os.open("/dev/null", os.O_RDWR) for i in range(3): @@ -64,4 +76,4 @@ def daemonize(): except OSError as e: if e.errno != errno.EBADF: raise - os.close(null) \ No newline at end of file + os.close(null) diff --git a/apps/rss_feeds/management/commands/count_stories.py b/apps/rss_feeds/management/commands/count_stories.py index 06d7ba91bd..41db438e6d 100644 --- a/apps/rss_feeds/management/commands/count_stories.py +++ b/apps/rss_feeds/management/commands/count_stories.py @@ -1,23 +1,23 @@ from django.core.management.base import BaseCommand from apps.rss_feeds.models import Feed -class Command(BaseCommand): +class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("-f", "--feed", dest="feed", default=None) parser.add_argument("-t", "--title", dest="title", default=None) parser.add_argument("-V", "--verbose", dest="verbose", action="store_true") - + def handle(self, *args, **options): - if options['title']: - feeds = Feed.objects.filter(feed_title__icontains=options['title']) - elif options['feed']: - feeds = Feed.objects.filter(pk=options['feed']) + if options["title"]: + feeds = Feed.objects.filter(feed_title__icontains=options["title"]) + elif options["feed"]: + feeds = Feed.objects.filter(pk=options["feed"]) else: feeds = Feed.objects.all() # Count stories in past month to calculate next scheduled update for feed in feeds: - feed.count_stories(verbose=options['verbose']) - - print(("\nCounted %s feeds" % feeds.count())) \ No newline at end of file + feed.count_stories(verbose=options["verbose"]) + + print(("\nCounted %s feeds" % feeds.count())) diff --git a/apps/rss_feeds/management/commands/count_subscribers.py b/apps/rss_feeds/management/commands/count_subscribers.py index 40757a44d2..a9e465f7b8 100644 --- a/apps/rss_feeds/management/commands/count_subscribers.py +++ b/apps/rss_feeds/management/commands/count_subscribers.py @@ -1,34 +1,34 @@ from django.core.management.base import BaseCommand from apps.rss_feeds.models import Feed -class Command(BaseCommand): +class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("-f", "--feed", dest="feed", default=None) parser.add_argument("-t", "--title", dest="title", default=None) parser.add_argument("-V", "--verbose", dest="verbose", action="store_true") parser.add_argument("-D", "--delete", dest="delete", action="store_true") - + def handle(self, *args, **options): - if options['title']: - feeds = Feed.objects.filter(feed_title__icontains=options['title']) - elif options['feed']: - feeds = Feed.objects.filter(pk=options['feed']) + if options["title"]: + feeds = Feed.objects.filter(feed_title__icontains=options["title"]) + elif options["feed"]: + feeds = Feed.objects.filter(pk=options["feed"]) else: feeds = Feed.objects.all() - + feeds_count = feeds.count() - + for i in range(0, feeds_count, 100): - feeds = Feed.objects.all()[i:i+100] + feeds = Feed.objects.all()[i : i + 100] for feed in feeds.iterator(): - feed.count_subscribers(verbose=options['verbose']) - - if options['delete']: + feed.count_subscribers(verbose=options["verbose"]) + + if options["delete"]: print("# Deleting old feeds...") old_feeds = Feed.objects.filter(num_subscribers=0) for feed in old_feeds: feed.count_subscribers(verbose=True) if feed.num_subscribers == 0: - print((' ---> Deleting: [%s] %s' % (feed.pk, feed))) - feed.delete() \ No newline at end of file + print((" ---> Deleting: [%s] %s" % (feed.pk, feed))) + feed.delete() diff --git a/apps/rss_feeds/management/commands/mark_read.py 
b/apps/rss_feeds/management/commands/mark_read.py index f72d4f5f37..ae158ead43 100644 --- a/apps/rss_feeds/management/commands/mark_read.py +++ b/apps/rss_feeds/management/commands/mark_read.py @@ -3,28 +3,30 @@ from apps.reader.models import UserSubscription import datetime -class Command(BaseCommand): +class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("-d", "--days", dest="days", nargs=1, default=1, help="Days of unread") parser.add_argument("-u", "--username", dest="username", nargs=1, help="Specify user id or username") parser.add_argument("-U", "--userid", dest="userid", nargs=1, help="Specify user id or username") - + def handle(self, *args, **options): - if options['userid']: - user = User.objects.filter(pk=options['userid'])[0] - elif options['username']: - user = User.objects.get(username__icontains=options['username']) + if options["userid"]: + user = User.objects.filter(pk=options["userid"])[0] + elif options["username"]: + user = User.objects.get(username__icontains=options["username"]) else: raise Exception("Need username or user id.") - + user.profile.last_seen_on = datetime.datetime.utcnow() user.profile.save() feeds = UserSubscription.objects.filter(user=user) for sub in feeds: - if options['days'] == 0: + if options["days"] == 0: sub.mark_feed_read() else: - sub.mark_read_date = datetime.datetime.utcnow() - datetime.timedelta(days=int(options['days'])) + sub.mark_read_date = datetime.datetime.utcnow() - datetime.timedelta( + days=int(options["days"]) + ) sub.needs_unread_recalc = True - sub.save() \ No newline at end of file + sub.save() diff --git a/apps/rss_feeds/management/commands/query_popularity.py b/apps/rss_feeds/management/commands/query_popularity.py index 65acd70fc6..5afc874935 100644 --- a/apps/rss_feeds/management/commands/query_popularity.py +++ b/apps/rss_feeds/management/commands/query_popularity.py @@ -8,8 +8,8 @@ import re import datetime -class Command(BaseCommand): +class Command(BaseCommand): def add_argument(self, parser): parser.add_argument("-q", "--query", dest="query", help="Search query") parser.add_argument("-l", "--limit", dest="limit", type="int", default=1000, help="Limit of stories") @@ -18,4 +18,4 @@ def handle(self, *args, **options): # settings.LOG_TO_STREAM = True # Feed.query_popularity(options['query'], limit=options['limit']) - Feed.xls_query_popularity(options['query'], limit=options['limit']) \ No newline at end of file + Feed.xls_query_popularity(options["query"], limit=options["limit"]) diff --git a/apps/rss_feeds/management/commands/refresh_feed.py b/apps/rss_feeds/management/commands/refresh_feed.py index 63527d548d..14a1772c1f 100644 --- a/apps/rss_feeds/management/commands/refresh_feed.py +++ b/apps/rss_feeds/management/commands/refresh_feed.py @@ -2,8 +2,8 @@ from apps.rss_feeds.models import Feed from utils.management_functions import daemonize -class Command(BaseCommand): +class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("-f", "--feed", dest="feed", default=None) parser.add_argument("-F", "--force", dest="force", action="store_true") @@ -11,11 +11,11 @@ def add_arguments(self, parser): parser.add_argument("-d", "--daemon", dest="daemonize", action="store_true") def handle(self, *args, **options): - if options['daemonize']: + if options["daemonize"]: daemonize() - - if options['title']: - feed = Feed.objects.get(feed_title__icontains=options['title']) + + if options["title"]: + feed = Feed.objects.get(feed_title__icontains=options["title"]) else: - feed = 
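A note on the management commands in this patch: several keep type="int" from the old optparse signatures (--days and --offset in calculate_scores, --limit in query_popularity). argparse requires a callable for type=, so these options raise as soon as a value is supplied; nargs=1 also stores a one-element list rather than a scalar. query_popularity additionally names its hook add_argument, which Django's BaseCommand never calls (the hook is add_arguments), so its options are never registered. A corrected sketch:

    from django.core.management.base import BaseCommand

    class Command(BaseCommand):
        def add_arguments(self, parser):  # note the plural
            parser.add_argument("-D", "--days", dest="days", default=1, type=int)
            parser.add_argument("-O", "--offset", dest="offset", default=0, type=int)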
Feed.get_by_id(options['feed']) - feed.update(force=options['force'], single_threaded=True, verbose=True) + feed = Feed.get_by_id(options["feed"]) + feed.update(force=options["force"], single_threaded=True, verbose=True) diff --git a/apps/rss_feeds/management/commands/refresh_feeds.py b/apps/rss_feeds/management/commands/refresh_feeds.py index 44b2ed5054..ef176beeb2 100644 --- a/apps/rss_feeds/management/commands/refresh_feeds.py +++ b/apps/rss_feeds/management/commands/refresh_feeds.py @@ -13,76 +13,85 @@ class Command(BaseCommand): - def add_arguments(self, parser): parser.add_argument("-f", "--feed", default=None) parser.add_argument("-d", "--daemon", dest="daemonize", action="store_true") parser.add_argument("-F", "--force", dest="force", action="store_true") parser.add_argument("-s", "--single_threaded", dest="single_threaded", action="store_true") - parser.add_argument('-t', '--timeout', type=int, default=10, - help='Wait timeout in seconds when connecting to feeds.') - parser.add_argument('-u', '--username', type=str, dest='username') - parser.add_argument('-V', '--verbose', action='store_true', - dest='verbose', default=False, help='Verbose output.') - parser.add_argument('-S', '--skip', type=int, - dest='skip', default=0, help='Skip stories per month < #.') - parser.add_argument('-w', '--workerthreads', type=int, default=4, - help='Worker threads that will fetch feeds in parallel.') + parser.add_argument( + "-t", "--timeout", type=int, default=10, help="Wait timeout in seconds when connecting to feeds." + ) + parser.add_argument("-u", "--username", type=str, dest="username") + parser.add_argument( + "-V", "--verbose", action="store_true", dest="verbose", default=False, help="Verbose output." + ) + parser.add_argument( + "-S", "--skip", type=int, dest="skip", default=0, help="Skip stories per month < #." 
+ ) + parser.add_argument( + "-w", + "--workerthreads", + type=int, + default=4, + help="Worker threads that will fetch feeds in parallel.", + ) def handle(self, *args, **options): - if options['daemonize']: + if options["daemonize"]: daemonize() - + settings.LOG_TO_STREAM = True now = datetime.datetime.utcnow() - - if options['skip']: - feeds = Feed.objects.filter(next_scheduled_update__lte=now, - average_stories_per_month__lt=options['skip'], - active=True) + + if options["skip"]: + feeds = Feed.objects.filter( + next_scheduled_update__lte=now, average_stories_per_month__lt=options["skip"], active=True + ) print(" ---> Skipping %s feeds" % feeds.count()) for feed in feeds: feed.set_next_scheduled_update() - print('.', end=' ') + print(".", end=" ") return - - socket.setdefaulttimeout(options['timeout']) - if options['force']: + + socket.setdefaulttimeout(options["timeout"]) + if options["force"]: feeds = Feed.objects.all() - elif options['username']: - usersubs = UserSubscription.objects.filter(user=User.objects.get(username=options['username']), active=True) - feeds = Feed.objects.filter(pk__in=usersubs.values('feed_id')) - elif options['feed']: - feeds = Feed.objects.filter(pk=options['feed']) + elif options["username"]: + usersubs = UserSubscription.objects.filter( + user=User.objects.get(username=options["username"]), active=True + ) + feeds = Feed.objects.filter(pk__in=usersubs.values("feed_id")) + elif options["feed"]: + feeds = Feed.objects.filter(pk=options["feed"]) else: feeds = Feed.objects.filter(next_scheduled_update__lte=now, active=True) - - feeds = feeds.order_by('?') - + + feeds = feeds.order_by("?") + for f in feeds: f.set_next_scheduled_update() - - num_workers = min(len(feeds), options['workerthreads']) - if options['single_threaded']: + + num_workers = min(len(feeds), options["workerthreads"]) + if options["single_threaded"]: num_workers = 1 - - options['compute_scores'] = True - options['quick'] = float(MStatistics.get('quick_fetch', 0)) - options['updates_off'] = MStatistics.get('updates_off', False) - - disp = feed_fetcher.Dispatcher(options, num_workers) - + + options["compute_scores"] = True + options["quick"] = float(MStatistics.get("quick_fetch", 0)) + options["updates_off"] = MStatistics.get("updates_off", False) + + disp = feed_fetcher.Dispatcher(options, num_workers) + feeds_queue = [] for _ in range(num_workers): feeds_queue.append([]) - + i = 0 for feed in feeds: - feeds_queue[i%num_workers].append(feed.pk) + feeds_queue[i % num_workers].append(feed.pk) i += 1 disp.add_jobs(feeds_queue, i) - + django.db.connection.close() - + print(" ---> Fetching %s feeds..." 
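The queue fill in refresh_feeds above deals feed ids round-robin across the per-worker lists handed to the Dispatcher. The same distribution in isolation:

    num_workers = 4
    feed_ids = list(range(10))              # stand-in for the feeds queryset
    queues = [[] for _ in range(num_workers)]
    for i, feed_id in enumerate(feed_ids):
        queues[i % num_workers].append(feed_id)
    # queues == [[0, 4, 8], [1, 5, 9], [2, 6], [3, 7]]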
% feeds.count()) disp.run_jobs() diff --git a/apps/rss_feeds/management/commands/task_feeds.py b/apps/rss_feeds/management/commands/task_feeds.py index 794d8184bd..664c250ea6 100644 --- a/apps/rss_feeds/management/commands/task_feeds.py +++ b/apps/rss_feeds/management/commands/task_feeds.py @@ -5,16 +5,22 @@ class Command(BaseCommand): - def add_arguments(self, parser): parser.add_argument("-f", "--feed", default=None) - parser.add_argument("-a", "--all", default=False, action='store_true') - parser.add_argument("-b", "--broken", help="Task broken feeds that havent been fetched in a day.", default=False, action='store_true') - parser.add_argument('-V', '--verbose', action='store_true', - dest='verbose', default=False, help='Verbose output.') - + parser.add_argument("-a", "--all", default=False, action="store_true") + parser.add_argument( + "-b", + "--broken", + help="Task broken feeds that havent been fetched in a day.", + default=False, + action="store_true", + ) + parser.add_argument( + "-V", "--verbose", action="store_true", dest="verbose", default=False, help="Verbose output." + ) + def handle(self, *args, **options): - if options['broken']: + if options["broken"]: TaskBrokenFeeds.apply() else: TaskFeeds.apply() diff --git a/apps/rss_feeds/management/commands/trim_feeds.py b/apps/rss_feeds/management/commands/trim_feeds.py index 6d6c1090bc..2cff87ad83 100644 --- a/apps/rss_feeds/management/commands/trim_feeds.py +++ b/apps/rss_feeds/management/commands/trim_feeds.py @@ -2,27 +2,23 @@ from apps.rss_feeds.models import Feed import gc -class Command(BaseCommand): +class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("-f", "--feed", dest="feed", default=None), def handle(self, *args, **options): - if not options['feed']: - feeds = Feed.objects.filter( - fetched_once=True, - active_subscribers=0, - premium_subscribers=0 - ) + if not options["feed"]: + feeds = Feed.objects.filter(fetched_once=True, active_subscribers=0, premium_subscribers=0) else: - feeds = Feed.objects.filter(feed_id=options['feed']) + feeds = Feed.objects.filter(feed_id=options["feed"]) for f in queryset_iterator(feeds): f.trim_feed(verbose=True) - + def queryset_iterator(queryset, chunksize=100): - ''' + """ Iterate over a Django Queryset ordered by the primary key This method loads a maximum of chunksize (default: 1000) rows in it's @@ -31,12 +27,12 @@ def queryset_iterator(queryset, chunksize=100): classes. Note that the implementation of the iterator does not support ordered query sets. 
- ''' - last_pk = queryset.order_by('-pk')[0].pk - queryset = queryset.order_by('pk') + """ + last_pk = queryset.order_by("-pk")[0].pk + queryset = queryset.order_by("pk") pk = queryset[0].pk while pk < last_pk: for row in queryset.filter(pk__gte=pk, pk__lte=last_pk)[:chunksize]: yield row pk += chunksize - gc.collect() \ No newline at end of file + gc.collect() diff --git a/apps/rss_feeds/migrations/0001_initial.py b/apps/rss_feeds/migrations/0001_initial.py index e2e0255cd3..5741ee8b52 100644 --- a/apps/rss_feeds/migrations/0001_initial.py +++ b/apps/rss_feeds/migrations/0001_initial.py @@ -6,83 +6,107 @@ class Migration(migrations.Migration): - initial = True - dependencies = [ - ] + dependencies = [] operations = [ migrations.CreateModel( - name='DuplicateFeed', + name="DuplicateFeed", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('duplicate_address', models.CharField(db_index=True, max_length=764)), - ('duplicate_link', models.CharField(db_index=True, max_length=764, null=True)), - ('duplicate_feed_id', models.CharField(db_index=True, max_length=255, null=True)), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("duplicate_address", models.CharField(db_index=True, max_length=764)), + ("duplicate_link", models.CharField(db_index=True, max_length=764, null=True)), + ("duplicate_feed_id", models.CharField(db_index=True, max_length=255, null=True)), ], ), migrations.CreateModel( - name='Feed', + name="Feed", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('feed_address', models.URLField(db_index=True, max_length=764)), - ('feed_address_locked', models.NullBooleanField(default=False)), - ('feed_link', models.URLField(blank=True, default='', max_length=1000, null=True)), - ('feed_link_locked', models.BooleanField(default=False)), - ('hash_address_and_link', models.CharField(max_length=64, unique=True)), - ('feed_title', models.CharField(blank=True, default='[Untitled]', max_length=255, null=True)), - ('is_push', models.NullBooleanField(default=False)), - ('active', models.BooleanField(db_index=True, default=True)), - ('num_subscribers', models.IntegerField(default=-1)), - ('active_subscribers', models.IntegerField(db_index=True, default=-1)), - ('premium_subscribers', models.IntegerField(default=-1)), - ('active_premium_subscribers', models.IntegerField(default=-1)), - ('last_update', models.DateTimeField(db_index=True)), - ('next_scheduled_update', models.DateTimeField()), - ('last_story_date', models.DateTimeField(blank=True, null=True)), - ('fetched_once', models.BooleanField(default=False)), - ('known_good', models.BooleanField(default=False)), - ('has_feed_exception', models.BooleanField(db_index=True, default=False)), - ('has_page_exception', models.BooleanField(db_index=True, default=False)), - ('has_page', models.BooleanField(default=True)), - ('exception_code', models.IntegerField(default=0)), - ('errors_since_good', models.IntegerField(default=0)), - ('min_to_decay', models.IntegerField(default=0)), - ('days_to_trim', models.IntegerField(default=90)), - ('creation', models.DateField(auto_now_add=True)), - ('etag', models.CharField(blank=True, max_length=255, null=True)), - ('last_modified', models.DateTimeField(blank=True, null=True)), - ('stories_last_month', models.IntegerField(default=0)), - ('average_stories_per_month', models.IntegerField(default=0)), - ('last_load_time', 
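On queryset_iterator above: the docstring still says a chunksize of 1000 while the signature defaults to 100, and the fixed pk += chunksize stride assumes roughly dense primary keys — with sparse pks a later window can re-yield rows an earlier [:chunksize] slice already returned. The pk-window walk itself, in isolation:

    def pk_windows(first_pk, last_pk, chunksize=100):
        # Yields half-open [lo, hi) pk windows covering first_pk..last_pk.
        pk = first_pk
        while pk <= last_pk:
            yield pk, min(pk + chunksize, last_pk + 1)
            pk += chunksize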
models.IntegerField(default=0)), - ('favicon_color', models.CharField(blank=True, max_length=6, null=True)), - ('favicon_not_found', models.BooleanField(default=False)), - ('s3_page', models.NullBooleanField(default=False)), - ('s3_icon', models.NullBooleanField(default=False)), - ('search_indexed', models.NullBooleanField(default=None)), - ('branch_from_feed', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='rss_feeds.Feed')), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("feed_address", models.URLField(db_index=True, max_length=764)), + ("feed_address_locked", models.NullBooleanField(default=False)), + ("feed_link", models.URLField(blank=True, default="", max_length=1000, null=True)), + ("feed_link_locked", models.BooleanField(default=False)), + ("hash_address_and_link", models.CharField(max_length=64, unique=True)), + ("feed_title", models.CharField(blank=True, default="[Untitled]", max_length=255, null=True)), + ("is_push", models.NullBooleanField(default=False)), + ("active", models.BooleanField(db_index=True, default=True)), + ("num_subscribers", models.IntegerField(default=-1)), + ("active_subscribers", models.IntegerField(db_index=True, default=-1)), + ("premium_subscribers", models.IntegerField(default=-1)), + ("active_premium_subscribers", models.IntegerField(default=-1)), + ("last_update", models.DateTimeField(db_index=True)), + ("next_scheduled_update", models.DateTimeField()), + ("last_story_date", models.DateTimeField(blank=True, null=True)), + ("fetched_once", models.BooleanField(default=False)), + ("known_good", models.BooleanField(default=False)), + ("has_feed_exception", models.BooleanField(db_index=True, default=False)), + ("has_page_exception", models.BooleanField(db_index=True, default=False)), + ("has_page", models.BooleanField(default=True)), + ("exception_code", models.IntegerField(default=0)), + ("errors_since_good", models.IntegerField(default=0)), + ("min_to_decay", models.IntegerField(default=0)), + ("days_to_trim", models.IntegerField(default=90)), + ("creation", models.DateField(auto_now_add=True)), + ("etag", models.CharField(blank=True, max_length=255, null=True)), + ("last_modified", models.DateTimeField(blank=True, null=True)), + ("stories_last_month", models.IntegerField(default=0)), + ("average_stories_per_month", models.IntegerField(default=0)), + ("last_load_time", models.IntegerField(default=0)), + ("favicon_color", models.CharField(blank=True, max_length=6, null=True)), + ("favicon_not_found", models.BooleanField(default=False)), + ("s3_page", models.NullBooleanField(default=False)), + ("s3_icon", models.NullBooleanField(default=False)), + ("search_indexed", models.NullBooleanField(default=None)), + ( + "branch_from_feed", + models.ForeignKey( + blank=True, + null=True, + on_delete=django.db.models.deletion.CASCADE, + to="rss_feeds.Feed", + ), + ), ], options={ - 'db_table': 'feeds', - 'ordering': ['feed_title'], + "db_table": "feeds", + "ordering": ["feed_title"], }, ), migrations.CreateModel( - name='FeedData', + name="FeedData", fields=[ - ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('feed_tagline', models.CharField(blank=True, max_length=1024, null=True)), - ('story_count_history', models.TextField(blank=True, null=True)), - ('feed_classifier_counts', models.TextField(blank=True, null=True)), - ('popular_tags', models.CharField(blank=True, max_length=1024, null=True)), - 
('popular_authors', models.CharField(blank=True, max_length=2048, null=True)), - ('feed', utils.fields.AutoOneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='data', to='rss_feeds.Feed')), + ( + "id", + models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID"), + ), + ("feed_tagline", models.CharField(blank=True, max_length=1024, null=True)), + ("story_count_history", models.TextField(blank=True, null=True)), + ("feed_classifier_counts", models.TextField(blank=True, null=True)), + ("popular_tags", models.CharField(blank=True, max_length=1024, null=True)), + ("popular_authors", models.CharField(blank=True, max_length=2048, null=True)), + ( + "feed", + utils.fields.AutoOneToOneField( + on_delete=django.db.models.deletion.CASCADE, related_name="data", to="rss_feeds.Feed" + ), + ), ], ), migrations.AddField( - model_name='duplicatefeed', - name='feed', - field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='duplicate_addresses', to='rss_feeds.Feed'), + model_name="duplicatefeed", + name="feed", + field=models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="duplicate_addresses", + to="rss_feeds.Feed", + ), ), ] diff --git a/apps/rss_feeds/migrations/0002_remove_mongo_types.py b/apps/rss_feeds/migrations/0002_remove_mongo_types.py index a6b98c7ba2..e34e70f9b8 100644 --- a/apps/rss_feeds/migrations/0002_remove_mongo_types.py +++ b/apps/rss_feeds/migrations/0002_remove_mongo_types.py @@ -3,28 +3,27 @@ from django.db import migrations from django.conf import settings + def remove_mongo_types(apps, schema_editor): db = settings.MONGODB.newsblur_dev collections = db.collection_names() for collection_name in collections: collection = db[collection_name] print(" ---> %s..." % (collection_name)) - if 'system' in collection_name: continue + if "system" in collection_name: + continue collection.update({}, {"$unset": {"_types": 1}}, multi=True) index_information = collection.index_information() - indexes_to_drop = [key for key, value in index_information.items() - if 'types' in value] + indexes_to_drop = [key for key, value in index_information.items() if "types" in value] # print(index_information, indexes_) for index in indexes_to_drop: print(" ---> Dropping mongo index %s on %s..." 
% (index, collection_name)) collection.drop_index(index) -class Migration(migrations.Migration): +class Migration(migrations.Migration): dependencies = [ - ('rss_feeds', '0001_initial'), + ("rss_feeds", "0001_initial"), ] - operations = [ - migrations.RunPython(remove_mongo_types, migrations.RunPython.noop) - ] + operations = [migrations.RunPython(remove_mongo_types, migrations.RunPython.noop)] diff --git a/apps/rss_feeds/migrations/0003_auto_20220110_2105.py b/apps/rss_feeds/migrations/0003_auto_20220110_2105.py index 9986d3c7ec..e13a80de41 100644 --- a/apps/rss_feeds/migrations/0003_auto_20220110_2105.py +++ b/apps/rss_feeds/migrations/0003_auto_20220110_2105.py @@ -4,35 +4,34 @@ class Migration(migrations.Migration): - dependencies = [ - ('rss_feeds', '0002_remove_mongo_types'), + ("rss_feeds", "0002_remove_mongo_types"), ] operations = [ migrations.AlterField( - model_name='feed', - name='feed_address_locked', + model_name="feed", + name="feed_address_locked", field=models.BooleanField(blank=True, default=False, null=True), ), migrations.AlterField( - model_name='feed', - name='is_push', + model_name="feed", + name="is_push", field=models.BooleanField(blank=True, default=False, null=True), ), migrations.AlterField( - model_name='feed', - name='s3_icon', + model_name="feed", + name="s3_icon", field=models.BooleanField(blank=True, default=False, null=True), ), migrations.AlterField( - model_name='feed', - name='s3_page', + model_name="feed", + name="s3_page", field=models.BooleanField(blank=True, default=False, null=True), ), migrations.AlterField( - model_name='feed', - name='search_indexed', + model_name="feed", + name="search_indexed", field=models.BooleanField(blank=True, default=None, null=True), ), ] diff --git a/apps/rss_feeds/migrations/0003_mongo_version_4_0.py b/apps/rss_feeds/migrations/0003_mongo_version_4_0.py index 9a2999904d..7595452d34 100644 --- a/apps/rss_feeds/migrations/0003_mongo_version_4_0.py +++ b/apps/rss_feeds/migrations/0003_mongo_version_4_0.py @@ -3,6 +3,7 @@ from django.db import migrations from django.conf import settings + def set_mongo_feature_compatibility_version(apps, schema_editor): new_version = "4.0" db = settings.MONGODB.admin @@ -13,14 +14,11 @@ def set_mongo_feature_compatibility_version(apps, schema_editor): if old_version != new_version: db.command({"setFeatureCompatibilityVersion": new_version}) print(f" ---> Updated MongoDB featureCompatibilityVersion: {new_version}") - -class Migration(migrations.Migration): +class Migration(migrations.Migration): dependencies = [ - ('rss_feeds', '0002_remove_mongo_types'), + ("rss_feeds", "0002_remove_mongo_types"), ] - operations = [ - migrations.RunPython(set_mongo_feature_compatibility_version, migrations.RunPython.noop) - ] + operations = [migrations.RunPython(set_mongo_feature_compatibility_version, migrations.RunPython.noop)] diff --git a/apps/rss_feeds/migrations/0004_feed_pro_subscribers.py b/apps/rss_feeds/migrations/0004_feed_pro_subscribers.py index 7579e56ff1..35bc6e6d41 100644 --- a/apps/rss_feeds/migrations/0004_feed_pro_subscribers.py +++ b/apps/rss_feeds/migrations/0004_feed_pro_subscribers.py @@ -4,15 +4,14 @@ class Migration(migrations.Migration): - dependencies = [ - ('rss_feeds', '0003_auto_20220110_2105'), + ("rss_feeds", "0003_auto_20220110_2105"), ] operations = [ migrations.AddField( - model_name='feed', - name='pro_subscribers', + model_name="feed", + name="pro_subscribers", field=models.IntegerField(blank=True, default=0, null=True), ), ] diff --git 
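Both mongo migrations above follow the same forward-only pattern: an imperative RunPython whose reverse is a no-op, so unapplying the migration does not try to restore dropped indexes or downgrade the feature-compatibility version. A minimal template:

    from django.db import migrations

    def forwards(apps, schema_editor):
        pass  # imperative data/infra change goes here

    class Migration(migrations.Migration):
        dependencies = [("rss_feeds", "0001_initial")]
        operations = [migrations.RunPython(forwards, migrations.RunPython.noop)]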
a/apps/rss_feeds/migrations/0005_feed_archive_subscribers.py b/apps/rss_feeds/migrations/0005_feed_archive_subscribers.py index 1d8152591f..3d877b2bd0 100644 --- a/apps/rss_feeds/migrations/0005_feed_archive_subscribers.py +++ b/apps/rss_feeds/migrations/0005_feed_archive_subscribers.py @@ -4,15 +4,14 @@ class Migration(migrations.Migration): - dependencies = [ - ('rss_feeds', '0004_feed_pro_subscribers'), + ("rss_feeds", "0004_feed_pro_subscribers"), ] operations = [ migrations.AddField( - model_name='feed', - name='archive_subscribers', + model_name="feed", + name="archive_subscribers", field=models.IntegerField(blank=True, default=0, null=True), ), ] diff --git a/apps/rss_feeds/migrations/0006_feed_fs_size_bytes.py b/apps/rss_feeds/migrations/0006_feed_fs_size_bytes.py index cebc86363a..4123d3f8fb 100644 --- a/apps/rss_feeds/migrations/0006_feed_fs_size_bytes.py +++ b/apps/rss_feeds/migrations/0006_feed_fs_size_bytes.py @@ -4,15 +4,14 @@ class Migration(migrations.Migration): - dependencies = [ - ('rss_feeds', '0005_feed_archive_subscribers'), + ("rss_feeds", "0005_feed_archive_subscribers"), ] operations = [ migrations.AddField( - model_name='feed', - name='fs_size_bytes', + model_name="feed", + name="fs_size_bytes", field=models.IntegerField(blank=True, null=True), ), ] diff --git a/apps/rss_feeds/migrations/0007_merge_20220517_1355.py b/apps/rss_feeds/migrations/0007_merge_20220517_1355.py index f9e6e7bdeb..ae30775afc 100644 --- a/apps/rss_feeds/migrations/0007_merge_20220517_1355.py +++ b/apps/rss_feeds/migrations/0007_merge_20220517_1355.py @@ -4,11 +4,9 @@ class Migration(migrations.Migration): - dependencies = [ - ('rss_feeds', '0006_feed_fs_size_bytes'), - ('rss_feeds', '0003_mongo_version_4_0'), + ("rss_feeds", "0006_feed_fs_size_bytes"), + ("rss_feeds", "0003_mongo_version_4_0"), ] - operations = [ - ] + operations = [] diff --git a/apps/rss_feeds/migrations/0008_feed_archive_count.py b/apps/rss_feeds/migrations/0008_feed_archive_count.py index 0450de50f7..bc7becf63c 100644 --- a/apps/rss_feeds/migrations/0008_feed_archive_count.py +++ b/apps/rss_feeds/migrations/0008_feed_archive_count.py @@ -4,15 +4,14 @@ class Migration(migrations.Migration): - dependencies = [ - ('rss_feeds', '0007_merge_20220517_1355'), + ("rss_feeds", "0007_merge_20220517_1355"), ] operations = [ migrations.AddField( - model_name='feed', - name='archive_count', + model_name="feed", + name="archive_count", field=models.IntegerField(blank=True, null=True), ), ] diff --git a/apps/rss_feeds/models.py b/apps/rss_feeds/models.py index 41c71fa07a..89a0999a8d 100755 --- a/apps/rss_feeds/models.py +++ b/apps/rss_feeds/models.py @@ -18,6 +18,7 @@ from operator import itemgetter from bson.objectid import ObjectId from bs4 import BeautifulSoup + # from nltk.collocations import TrigramCollocationFinder, BigramCollocationFinder, TrigramAssocMeasures, BigramAssocMeasures from django.db import models from django.db import IntegrityError @@ -69,7 +70,9 @@ class Feed(models.Model): archive_subscribers = models.IntegerField(default=0, null=True, blank=True) pro_subscribers = models.IntegerField(default=0, null=True, blank=True) active_premium_subscribers = models.IntegerField(default=-1) - branch_from_feed = models.ForeignKey('Feed', blank=True, null=True, db_index=True, on_delete=models.CASCADE) + branch_from_feed = models.ForeignKey( + "Feed", blank=True, null=True, db_index=True, on_delete=models.CASCADE + ) last_update = models.DateTimeField(db_index=True) next_scheduled_update = models.DateTimeField() last_story_date 
= models.DateTimeField(null=True, blank=True) @@ -97,18 +100,18 @@ class Feed(models.Model): archive_count = models.IntegerField(null=True, blank=True) class Meta: - db_table="feeds" - ordering=["feed_title"] + db_table = "feeds" + ordering = ["feed_title"] # unique_together=[('feed_address', 'feed_link')] - + def __str__(self): if not self.feed_title: self.feed_title = "[Untitled]" self.save() return "%s%s: %s - %s/%s/%s/%s/%s %s stories (%s bytes)" % ( - self.pk, + self.pk, (" [B: %s]" % self.branch_from_feed.pk if self.branch_from_feed else ""), - self.feed_title, + self.feed_title, self.num_subscribers, self.active_subscribers, self.active_premium_subscribers, @@ -116,46 +119,43 @@ def __str__(self): self.pro_subscribers, self.archive_count, self.fs_size_bytes, - ) - + ) + @property def title(self): title = self.feed_title or "[Untitled]" if self.active_premium_subscribers >= 1: title = "%s*" % title[:29] return title - + @property def log_title(self): return self.__str__() - + @property def permalink(self): return "%s/site/%s/%s" % (settings.NEWSBLUR_URL, self.pk, slugify(self.feed_title.lower()[:50])) - + @property def favicon_url(self): - if settings.BACKED_BY_AWS['icons_on_s3'] and self.s3_icon: + if settings.BACKED_BY_AWS["icons_on_s3"] and self.s3_icon: return "https://s3.amazonaws.com/%s/%s.png" % (settings.S3_ICONS_BUCKET_NAME, self.pk) - return reverse('feed-favicon', kwargs={'feed_id': self.pk}) - + return reverse("feed-favicon", kwargs={"feed_id": self.pk}) + @property def favicon_url_fqdn(self): - if settings.BACKED_BY_AWS['icons_on_s3'] and self.s3_icon: + if settings.BACKED_BY_AWS["icons_on_s3"] and self.s3_icon: return self.favicon_url - return "https://%s%s" % ( - Site.objects.get_current().domain, - self.favicon_url - ) - + return "https://%s%s" % (Site.objects.get_current().domain, self.favicon_url) + @property def s3_pages_key(self): return "%s.gz.html" % self.pk - + @property def s3_icons_key(self): return "%s.png" % self.pk - + @property def unread_cutoff(self): if self.archive_subscribers and self.archive_subscribers > 0: @@ -164,117 +164,121 @@ def unread_cutoff(self): return datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD) return datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD_FREE) - + @classmethod def days_of_story_hashes_for_feed(cls, feed_id): try: - feed = cls.objects.only('archive_subscribers').get(pk=feed_id) + feed = cls.objects.only("archive_subscribers").get(pk=feed_id) return feed.days_of_story_hashes except cls.DoesNotExist: return settings.DAYS_OF_STORY_HASHES - + @property def days_of_story_hashes(self): if self.archive_subscribers and self.archive_subscribers > 0: return settings.DAYS_OF_STORY_HASHES_ARCHIVE return settings.DAYS_OF_STORY_HASHES - + @property def story_hashes_in_unread_cutoff(self): r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) - current_time = int(time.time() + 60*60*24) - unread_cutoff = self.unread_cutoff.strftime('%s') - story_hashes = r.zrevrangebyscore('zF:%s' % self.pk, current_time, unread_cutoff) + current_time = int(time.time() + 60 * 60 * 24) + unread_cutoff = self.unread_cutoff.strftime("%s") + story_hashes = r.zrevrangebyscore("zF:%s" % self.pk, current_time, unread_cutoff) return story_hashes - + @classmethod def generate_hash_address_and_link(cls, feed_address, feed_link): - if not feed_address: feed_address = "" - if not feed_link: feed_link = "" - return hashlib.sha1((feed_address+feed_link).encode(encoding='utf-8')).hexdigest() - + if not 
feed_address: + feed_address = "" + if not feed_link: + feed_link = "" + return hashlib.sha1((feed_address + feed_link).encode(encoding="utf-8")).hexdigest() + @property def is_newsletter(self): - return self.feed_address.startswith('newsletter:') or self.feed_address.startswith('http://newsletter:') - + return self.feed_address.startswith("newsletter:") or self.feed_address.startswith( + "http://newsletter:" + ) + def canonical(self, full=False, include_favicon=True): feed = { - 'id': self.pk, - 'feed_title': self.feed_title, - 'feed_address': self.feed_address, - 'feed_link': self.feed_link, - 'num_subscribers': self.num_subscribers, - 'updated': relative_timesince(self.last_update), - 'updated_seconds_ago': seconds_timesince(self.last_update), - 'fs_size_bytes': self.fs_size_bytes, - 'archive_count': self.archive_count, - 'last_story_date': self.last_story_date, - 'last_story_seconds_ago': seconds_timesince(self.last_story_date), - 'stories_last_month': self.stories_last_month, - 'average_stories_per_month': self.average_stories_per_month, - 'min_to_decay': self.min_to_decay, - 'subs': self.num_subscribers, - 'is_push': self.is_push, - 'is_newsletter': self.is_newsletter, - 'fetched_once': self.fetched_once, - 'search_indexed': self.search_indexed, - 'not_yet_fetched': not self.fetched_once, # Legacy. Doh. - 'favicon_color': self.favicon_color, - 'favicon_fade': self.favicon_fade(), - 'favicon_border': self.favicon_border(), - 'favicon_text_color': self.favicon_text_color(), - 'favicon_fetching': self.favicon_fetching, - 'favicon_url': self.favicon_url, - 's3_page': self.s3_page, - 's3_icon': self.s3_icon, - 'disabled_page': not self.has_page, + "id": self.pk, + "feed_title": self.feed_title, + "feed_address": self.feed_address, + "feed_link": self.feed_link, + "num_subscribers": self.num_subscribers, + "updated": relative_timesince(self.last_update), + "updated_seconds_ago": seconds_timesince(self.last_update), + "fs_size_bytes": self.fs_size_bytes, + "archive_count": self.archive_count, + "last_story_date": self.last_story_date, + "last_story_seconds_ago": seconds_timesince(self.last_story_date), + "stories_last_month": self.stories_last_month, + "average_stories_per_month": self.average_stories_per_month, + "min_to_decay": self.min_to_decay, + "subs": self.num_subscribers, + "is_push": self.is_push, + "is_newsletter": self.is_newsletter, + "fetched_once": self.fetched_once, + "search_indexed": self.search_indexed, + "not_yet_fetched": not self.fetched_once, # Legacy. Doh. 
+ "favicon_color": self.favicon_color, + "favicon_fade": self.favicon_fade(), + "favicon_border": self.favicon_border(), + "favicon_text_color": self.favicon_text_color(), + "favicon_fetching": self.favicon_fetching, + "favicon_url": self.favicon_url, + "s3_page": self.s3_page, + "s3_icon": self.s3_icon, + "disabled_page": not self.has_page, } - + if include_favicon: try: feed_icon = MFeedIcon.objects.get(feed_id=self.pk) - feed['favicon'] = feed_icon.data + feed["favicon"] = feed_icon.data except MFeedIcon.DoesNotExist: pass if self.has_page_exception or self.has_feed_exception: - feed['has_exception'] = True - feed['exception_type'] = 'feed' if self.has_feed_exception else 'page' - feed['exception_code'] = self.exception_code + feed["has_exception"] = True + feed["exception_type"] = "feed" if self.has_feed_exception else "page" + feed["exception_code"] = self.exception_code elif full: - feed['has_exception'] = False - feed['exception_type'] = None - feed['exception_code'] = self.exception_code - + feed["has_exception"] = False + feed["exception_type"] = None + feed["exception_code"] = self.exception_code + if full: - feed['average_stories_per_month'] = self.average_stories_per_month - feed['tagline'] = self.data.feed_tagline - feed['feed_tags'] = json.decode(self.data.popular_tags) if self.data.popular_tags else [] - feed['feed_authors'] = json.decode(self.data.popular_authors) if self.data.popular_authors else [] - + feed["average_stories_per_month"] = self.average_stories_per_month + feed["tagline"] = self.data.feed_tagline + feed["feed_tags"] = json.decode(self.data.popular_tags) if self.data.popular_tags else [] + feed["feed_authors"] = json.decode(self.data.popular_authors) if self.data.popular_authors else [] + return feed - + def save(self, *args, **kwargs): if not self.last_update: self.last_update = datetime.datetime.utcnow() if not self.next_scheduled_update: self.next_scheduled_update = datetime.datetime.utcnow() self.fix_google_alerts_urls() - + feed_address = self.feed_address or "" feed_link = self.feed_link or "" self.hash_address_and_link = self.generate_hash_address_and_link(feed_address, feed_link) - - max_feed_title = Feed._meta.get_field('feed_title').max_length + + max_feed_title = Feed._meta.get_field("feed_title").max_length if len(self.feed_title) > max_feed_title: self.feed_title = self.feed_title[:max_feed_title] - max_feed_address = Feed._meta.get_field('feed_address').max_length + max_feed_address = Feed._meta.get_field("feed_address").max_length if len(feed_address) > max_feed_address: self.feed_address = feed_address[:max_feed_address] - max_feed_link = Feed._meta.get_field('feed_link').max_length + max_feed_link = Feed._meta.get_field("feed_link").max_length if len(feed_link) > max_feed_link: self.feed_link = feed_link[:max_feed_link] - + try: super(Feed, self).save(*args, **kwargs) except IntegrityError as e: @@ -284,108 +288,123 @@ def save(self, *args, **kwargs): hash_address_and_link = self.generate_hash_address_and_link(feed_address, feed_link) logging.debug(" ---> ~FRNo dupes, checking hash collision: %s" % hash_address_and_link) duplicate_feeds = Feed.objects.filter(hash_address_and_link=hash_address_and_link) - + if not duplicate_feeds: - duplicate_feeds = Feed.objects.filter(feed_address=self.feed_address, - feed_link=self.feed_link) + duplicate_feeds = Feed.objects.filter( + feed_address=self.feed_address, feed_link=self.feed_link + ) if not duplicate_feeds: # Feed has been deleted. Just ignore it. 
- logging.debug(" ***> Changed to: %s - %s: %s" % (self.feed_address, self.feed_link, duplicate_feeds)) - logging.debug(' ***> [%-30s] Feed deleted (%s).' % (self.log_title[:30], self.pk)) + logging.debug( + " ***> Changed to: %s - %s: %s" % (self.feed_address, self.feed_link, duplicate_feeds) + ) + logging.debug(" ***> [%-30s] Feed deleted (%s)." % (self.log_title[:30], self.pk)) return - + for duplicate_feed in duplicate_feeds: if duplicate_feed.pk != self.pk: - logging.debug(" ---> ~FRFound different feed (%s), merging %s in..." % (duplicate_feeds[0], self.pk)) + logging.debug( + " ---> ~FRFound different feed (%s), merging %s in..." % (duplicate_feeds[0], self.pk) + ) feed = Feed.get_by_id(merge_feeds(duplicate_feeds[0].pk, self.pk)) return feed else: logging.debug(" ---> ~FRFeed is its own dupe? %s == %s" % (self, duplicate_feeds)) except DatabaseError as e: - logging.debug(" ---> ~FBFeed update failed, no change: %s / %s..." % (kwargs.get('update_fields', None), e)) + logging.debug( + " ---> ~FBFeed update failed, no change: %s / %s..." % (kwargs.get("update_fields", None), e) + ) pass - + return self - + @classmethod def index_all_for_search(cls, offset=0, subscribers=2): if not offset: SearchFeed.create_elasticsearch_mapping(delete=True) - - last_pk = cls.objects.latest('pk').pk + + last_pk = cls.objects.latest("pk").pk for f in range(offset, last_pk, 1000): - print(" ---> {f} / {last_pk} ({pct}%)".format(f=f, last_pk=last_pk, pct=str(float(f)/last_pk*100)[:2])) - feeds = Feed.objects.filter(pk__in=range(f, f+1000), - active=True, - active_subscribers__gte=subscribers)\ - .values_list('pk') - for feed_id, in feeds: + print( + " ---> {f} / {last_pk} ({pct}%)".format( + f=f, last_pk=last_pk, pct=str(float(f) / last_pk * 100)[:2] + ) + ) + feeds = Feed.objects.filter( + pk__in=range(f, f + 1000), active=True, active_subscribers__gte=subscribers + ).values_list("pk") + for (feed_id,) in feeds: Feed.objects.get(pk=feed_id).index_feed_for_search() - + def index_feed_for_search(self): min_subscribers = 1 if settings.DEBUG: min_subscribers = 0 if self.num_subscribers > min_subscribers and not self.branch_from_feed and not self.is_newsletter: - SearchFeed.index(feed_id=self.pk, - title=self.feed_title, - address=self.feed_address, - link=self.feed_link, - num_subscribers=self.num_subscribers) - + SearchFeed.index( + feed_id=self.pk, + title=self.feed_title, + address=self.feed_address, + link=self.feed_link, + num_subscribers=self.num_subscribers, + ) + def index_stories_for_search(self): - if self.search_indexed: return - + if self.search_indexed: + return + stories = MStory.objects(story_feed_id=self.pk) for story in stories: story.index_story_for_search() self.search_indexed = True self.save() - + def sync_redis(self): return MStory.sync_feed_redis(self.pk) - + def expire_redis(self, r=None): if not r: r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) - r.expire('F:%s' % self.pk, self.days_of_story_hashes*24*60*60) - r.expire('zF:%s' % self.pk, self.days_of_story_hashes*24*60*60) - + r.expire("F:%s" % self.pk, self.days_of_story_hashes * 24 * 60 * 60) + r.expire("zF:%s" % self.pk, self.days_of_story_hashes * 24 * 60 * 60) + @classmethod def low_volume_feeds(cls, feed_ids, stories_per_month=30): try: stories_per_month = int(stories_per_month) except ValueError: stories_per_month = 30 - feeds = Feed.objects.filter(pk__in=feed_ids, average_stories_per_month__lte=stories_per_month).only('pk') - + feeds = Feed.objects.filter(pk__in=feed_ids, 
average_stories_per_month__lte=stories_per_month).only( + "pk" + ) + return [f.pk for f in feeds] - + @classmethod def autocomplete(self, prefix, limit=5): results = SearchFeed.query(prefix) - feed_ids = [result['_source']['feed_id'] for result in results[:5]] + feed_ids = [result["_source"]["feed_id"] for result in results[:5]] # results = SearchQuerySet().autocomplete(address=prefix).order_by('-num_subscribers')[:limit] - # + # # if len(results) < limit: # results += SearchQuerySet().autocomplete(title=prefix).order_by('-num_subscribers')[:limit-len(results)] - # + # return feed_ids - + @classmethod def find_or_create(cls, feed_address, feed_link, defaults=None, **kwargs): feeds = cls.objects.filter(feed_address=feed_address, feed_link=feed_link) if feeds: return feeds[0], False - if feed_link and feed_link.endswith('/'): + if feed_link and feed_link.endswith("/"): feeds = cls.objects.filter(feed_address=feed_address, feed_link=feed_link[:-1]) if feeds: return feeds[0], False - + try: feed = cls.objects.get(feed_address=feed_address, feed_link=feed_link) return feed, False @@ -393,34 +412,33 @@ def find_or_create(cls, feed_address, feed_link, defaults=None, **kwargs): feed = cls(**defaults) feed = feed.save() return feed, True - + @classmethod def merge_feeds(cls, *args, **kwargs): return merge_feeds(*args, **kwargs) - + def fix_google_alerts_urls(self): - if (self.feed_address.startswith('http://user/') and - '/state/com.google/alerts/' in self.feed_address): + if self.feed_address.startswith("http://user/") and "/state/com.google/alerts/" in self.feed_address: match = re.match(r"http://user/(\d+)/state/com.google/alerts/(\d+)", self.feed_address) if match: user_id, alert_id = match.groups() self.feed_address = "http://www.google.com/alerts/feeds/%s/%s" % (user_id, alert_id) - + @classmethod def schedule_feed_fetches_immediately(cls, feed_ids, user_id=None): if settings.DEBUG: - logging.info(" ---> ~SN~FMSkipping the scheduling immediate fetch of ~SB%s~SN feeds (in DEBUG)..." % - len(feed_ids)) + logging.info( + " ---> ~SN~FMSkipping the scheduling immediate fetch of ~SB%s~SN feeds (in DEBUG)..." + % len(feed_ids) + ) return - + if user_id: user = User.objects.get(pk=user_id) - logging.user(user, "~SN~FMScheduling immediate fetch of ~SB%s~SN feeds..." % - len(feed_ids)) + logging.user(user, "~SN~FMScheduling immediate fetch of ~SB%s~SN feeds..." % len(feed_ids)) else: - logging.debug(" ---> ~SN~FMScheduling immediate fetch of ~SB%s~SN feeds..." % - len(feed_ids)) - + logging.debug(" ---> ~SN~FMScheduling immediate fetch of ~SB%s~SN feeds..." 
% len(feed_ids)) + if len(feed_ids) > 100: logging.debug(" ---> ~SN~FMFeeds scheduled: %s" % feed_ids) day_ago = datetime.datetime.now() - datetime.timedelta(days=1) @@ -430,72 +448,78 @@ def schedule_feed_fetches_immediately(cls, feed_ids, user_id=None): feed.count_subscribers() if not feed.active or feed.next_scheduled_update < day_ago: feed.schedule_feed_fetch_immediately(verbose=False) - + @property def favicon_fetching(self): return bool(not (self.favicon_not_found or self.favicon_color)) - + @classmethod def get_feed_by_url(self, *args, **kwargs): return self.get_feed_from_url(*args, **kwargs) - + @classmethod - def get_feed_from_url(cls, url, create=True, aggressive=False, fetch=True, offset=0, user=None, interactive=False): + def get_feed_from_url( + cls, url, create=True, aggressive=False, fetch=True, offset=0, user=None, interactive=False + ): feed = None without_rss = False original_url = url - - if url and url.startswith('newsletter:'): + + if url and url.startswith("newsletter:"): try: return cls.objects.get(feed_address=url) except cls.MultipleObjectsReturned: return cls.objects.filter(feed_address=url)[0] - if url and re.match('(https?://)?twitter.com/\w+/?', url): + if url and re.match("(https?://)?twitter.com/\w+/?", url): without_rss = True - if url and re.match(r'(https?://)?(www\.)?facebook.com/\w+/?$', url): + if url and re.match(r"(https?://)?(www\.)?facebook.com/\w+/?$", url): without_rss = True # Turn url @username@domain.com into domain.com/users/username.rss - if url and url.startswith('@') and '@' in url[1:]: - username, domain = url[1:].split('@') + if url and url.startswith("@") and "@" in url[1:]: + username, domain = url[1:].split("@") url = f"https://{domain}/users/{username}.rss" - if url and 'youtube.com/user/' in url: - username = re.search('youtube.com/user/(\w+)', url).group(1) + if url and "youtube.com/user/" in url: + username = re.search("youtube.com/user/(\w+)", url).group(1) url = "http://gdata.youtube.com/feeds/base/users/%s/uploads" % username without_rss = True - if url and 'youtube.com/@' in url: - username = url.split('youtube.com/@')[1] + if url and "youtube.com/@" in url: + username = url.split("youtube.com/@")[1] url = "http://gdata.youtube.com/feeds/base/users/%s/uploads" % username without_rss = True - if url and 'youtube.com/channel/' in url: - channel_id = re.search('youtube.com/channel/([-_\w]+)', url).group(1) + if url and "youtube.com/channel/" in url: + channel_id = re.search("youtube.com/channel/([-_\w]+)", url).group(1) url = "https://www.youtube.com/feeds/videos.xml?channel_id=%s" % channel_id without_rss = True - if url and 'youtube.com/feeds' in url: + if url and "youtube.com/feeds" in url: without_rss = True - if url and 'youtube.com/playlist' in url: + if url and "youtube.com/playlist" in url: without_rss = True - + def criteria(key, value): if aggressive: - return {'%s__icontains' % key: value} + return {"%s__icontains" % key: value} else: - return {'%s' % key: value} - + return {"%s" % key: value} + def by_url(address): - feed = cls.objects.filter( - branch_from_feed=None - ).filter(**criteria('feed_address', address)).order_by('-num_subscribers') + feed = ( + cls.objects.filter(branch_from_feed=None) + .filter(**criteria("feed_address", address)) + .order_by("-num_subscribers") + ) if not feed: - duplicate_feed = DuplicateFeed.objects.filter(**criteria('duplicate_address', address)) + duplicate_feed = DuplicateFeed.objects.filter(**criteria("duplicate_address", address)) if duplicate_feed and len(duplicate_feed) > offset: 
feed = [duplicate_feed[offset].feed] if not feed and aggressive: - feed = cls.objects.filter( - branch_from_feed=None - ).filter(**criteria('feed_link', address)).order_by('-num_subscribers') - + feed = ( + cls.objects.filter(branch_from_feed=None) + .filter(**criteria("feed_link", address)) + .order_by("-num_subscribers") + ) + return feed - + @timelimit(10) def _feedfinder_forman(url): found_feed_urls = feedfinder_forman.find_feeds(url) @@ -505,19 +529,21 @@ def _feedfinder_forman(url): def _feedfinder_pilgrim(url): found_feed_urls = feedfinder_pilgrim.feeds(url) return found_feed_urls - + # Normalize and check for feed_address, dupes, and feed_link url = urlnorm.normalize(url) if not url: logging.debug(" ---> ~FRCouldn't normalize url: ~SB%s" % url) return - + feed = by_url(url) found_feed_urls = [] - + if interactive: - import pdb; pdb.set_trace() - + import pdb + + pdb.set_trace() + # Create if it looks good if feed and len(feed) > offset: feed = feed[offset] @@ -525,15 +551,15 @@ def _feedfinder_pilgrim(url): try: found_feed_urls = _feedfinder_forman(url) except TimeoutError: - logging.debug(' ---> Feed finder timed out...') + logging.debug(" ---> Feed finder timed out...") found_feed_urls = [] if not found_feed_urls: try: found_feed_urls = _feedfinder_pilgrim(url) except TimeoutError: - logging.debug(' ---> Feed finder old timed out...') + logging.debug(" ---> Feed finder old timed out...") found_feed_urls = [] - + if len(found_feed_urls): feed_finder_url = found_feed_urls[0] logging.debug(" ---> Found feed URLs for %s: %s" % (url, found_feed_urls)) @@ -550,17 +576,17 @@ def _feedfinder_pilgrim(url): logging.debug(" ---> Found without_rss feed: %s / %s" % (url, original_url)) feed = cls.objects.create(feed_address=url, feed_link=original_url) feed = feed.update(requesting_user_id=user.pk if user else None) - + # Check for JSON feed if not feed and fetch and create: try: r = requests.get(url) except (requests.ConnectionError, requests.models.InvalidURL): r = None - if r and 'application/json' in r.headers.get('Content-Type'): + if r and "application/json" in r.headers.get("Content-Type"): feed = cls.objects.create(feed_address=url) feed = feed.update() - + # Still nothing? Maybe the URL has some clues. if not feed and fetch and len(found_feed_urls): feed_finder_url = found_feed_urls[0] @@ -570,17 +596,18 @@ def _feedfinder_pilgrim(url): feed = feed.update() elif feed and len(feed) > offset: feed = feed[offset] - + # Not created and not within bounds, so toss results. if isinstance(feed, QuerySet): logging.debug(" ---> ~FRNot created and not within bounds, tossing: ~SB%s" % feed) return - + return feed - + @classmethod def task_feeds(cls, feeds, queue_size=12, verbose=True): - if not feeds: return + if not feeds: + return r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) if isinstance(feeds, Feed): @@ -589,50 +616,50 @@ def task_feeds(cls, feeds, queue_size=12, verbose=True): feeds = [feeds.pk] elif verbose: logging.debug(" ---> ~SN~FBTasking ~SB~FC%s~FB~SN feeds..." 
% len(feeds)) - + if isinstance(feeds, QuerySet): feeds = [f.pk for f in feeds] - - r.srem('queued_feeds', *feeds) + + r.srem("queued_feeds", *feeds) now = datetime.datetime.now().strftime("%s") p = r.pipeline() for feed_id in feeds: - p.zadd('tasked_feeds', { feed_id: now }) + p.zadd("tasked_feeds", {feed_id: now}) p.execute() - + # for feed_ids in (feeds[pos:pos + queue_size] for pos in xrange(0, len(feeds), queue_size)): for feed_id in feeds: - UpdateFeeds.apply_async(args=(feed_id,), queue='update_feeds') - + UpdateFeeds.apply_async(args=(feed_id,), queue="update_feeds") + @classmethod def drain_task_feeds(cls): r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) - tasked_feeds = r.zrange('tasked_feeds', 0, -1) + tasked_feeds = r.zrange("tasked_feeds", 0, -1) if tasked_feeds: logging.debug(" ---> ~FRDraining %s tasked feeds..." % len(tasked_feeds)) - r.sadd('queued_feeds', *tasked_feeds) - r.zremrangebyrank('tasked_feeds', 0, -1) + r.sadd("queued_feeds", *tasked_feeds) + r.zremrangebyrank("tasked_feeds", 0, -1) else: logging.debug(" ---> No tasked feeds to drain") - - errored_feeds = r.zrange('error_feeds', 0, -1) + + errored_feeds = r.zrange("error_feeds", 0, -1) if errored_feeds: logging.debug(" ---> ~FRDraining %s errored feeds..." % len(errored_feeds)) - r.sadd('queued_feeds', *errored_feeds) - r.zremrangebyrank('error_feeds', 0, -1) + r.sadd("queued_feeds", *errored_feeds) + r.zremrangebyrank("error_feeds", 0, -1) else: logging.debug(" ---> No errored feeds to drain") def update_all_statistics(self, has_new_stories=False, force=False): - recount = not self.counts_converted_to_redis + recount = not self.counts_converted_to_redis count_extra = False if random.random() < 0.01 or not self.data.popular_tags or not self.data.popular_authors: count_extra = True - + self.count_subscribers(recount=recount) self.calculate_last_story_date() - + if force or has_new_stories or count_extra: self.save_feed_stories_last_month() @@ -642,15 +669,19 @@ def update_all_statistics(self, has_new_stories=False, force=False): if force or (has_new_stories and count_extra): self.save_popular_authors() self.save_popular_tags() - self.save_feed_story_history_statistics() - + self.save_feed_story_history_statistics() + def calculate_last_story_date(self): last_story_date = None try: - latest_story = MStory.objects( - story_feed_id=self.pk - ).limit(1).order_by('-story_date').only('story_date').first() + latest_story = ( + MStory.objects(story_feed_id=self.pk) + .limit(1) + .order_by("-story_date") + .only("story_date") + .first() + ) if latest_story: last_story_date = latest_story.story_date except MStory.DoesNotExist: @@ -658,15 +689,15 @@ def calculate_last_story_date(self): if not last_story_date or seconds_timesince(last_story_date) < 0: last_story_date = datetime.datetime.now() - + if last_story_date != self.last_story_date: self.last_story_date = last_story_date - self.save(update_fields=['last_story_date']) - + self.save(update_fields=["last_story_date"]) + @classmethod def setup_feeds_for_premium_subscribers(cls, feed_ids): logging.info(f" ---> ~SN~FMScheduling immediate premium setup of ~SB{len(feed_ids)}~SN feeds...") - + feeds = Feed.objects.filter(pk__in=feed_ids) for feed in feeds: feed.setup_feed_for_premium_subscribers() @@ -675,7 +706,7 @@ def setup_feed_for_premium_subscribers(self): self.count_subscribers() self.set_next_scheduled_update(verbose=settings.DEBUG) self.sync_redis() - + def check_feed_link_for_feed_address(self): @timelimit(10) def _1(): @@ -693,13 +724,16 @@ def 
_1(): found_feed_urls = feedfinder_forman.find_feeds(self.feed_link) if len(found_feed_urls) and found_feed_urls[0] != self.feed_address: feed_address = found_feed_urls[0] - + if feed_address: - if any(ignored_domain in feed_address for ignored_domain in [ - 'feedburner.com/atom.xml', - 'feedburner.com/feed/', - 'feedsportal.com', - ]): + if any( + ignored_domain in feed_address + for ignored_domain in [ + "feedburner.com/atom.xml", + "feedburner.com/feed/", + "feedsportal.com", + ] + ): logging.debug(" ---> Feed points to 'Wierdo' or 'feedsportal', ignoring.") return False, self try: @@ -717,135 +751,140 @@ def _1(): original_feed.save() merge_feeds(original_feed.pk, self.pk) return feed_address, feed - + if self.feed_address_locked: return False, self - + try: feed_address, feed = _1() except TimeoutError as e: - logging.debug(' ---> [%-30s] Feed address check timed out...' % (self.log_title[:30])) - self.save_feed_history(505, 'Timeout', e) + logging.debug(" ---> [%-30s] Feed address check timed out..." % (self.log_title[:30])) + self.save_feed_history(505, "Timeout", e) feed = self feed_address = None - + return bool(feed_address), feed def save_feed_history(self, status_code, message, exception=None, date=None): - fetch_history = MFetchHistory.add(feed_id=self.pk, - fetch_type='feed', - code=int(status_code), - date=date, - message=message, - exception=exception) - + fetch_history = MFetchHistory.add( + feed_id=self.pk, + fetch_type="feed", + code=int(status_code), + date=date, + message=message, + exception=exception, + ) + if status_code not in (200, 304): self.errors_since_good += 1 - self.count_errors_in_history('feed', status_code, fetch_history=fetch_history) + self.count_errors_in_history("feed", status_code, fetch_history=fetch_history) self.set_next_scheduled_update(verbose=settings.DEBUG) elif self.has_feed_exception or self.errors_since_good: self.errors_since_good = 0 self.has_feed_exception = False self.active = True self.save() - + def save_page_history(self, status_code, message, exception=None, date=None): - fetch_history = MFetchHistory.add(feed_id=self.pk, - fetch_type='page', - code=int(status_code), - date=date, - message=message, - exception=exception) - + fetch_history = MFetchHistory.add( + feed_id=self.pk, + fetch_type="page", + code=int(status_code), + date=date, + message=message, + exception=exception, + ) + if status_code not in (200, 304): - self.count_errors_in_history('page', status_code, fetch_history=fetch_history) + self.count_errors_in_history("page", status_code, fetch_history=fetch_history) elif self.has_page_exception or not self.has_page: self.has_page_exception = False self.has_page = True self.active = True self.save() - + def save_raw_feed(self, raw_feed, fetch_date): - MFetchHistory.add(feed_id=self.pk, - fetch_type='raw_feed', - code=200, - message=raw_feed, - date=fetch_date) - - def count_errors_in_history(self, exception_type='feed', status_code=None, fetch_history=None): + MFetchHistory.add(feed_id=self.pk, fetch_type="raw_feed", code=200, message=raw_feed, date=fetch_date) + + def count_errors_in_history(self, exception_type="feed", status_code=None, fetch_history=None): if not fetch_history: fetch_history = MFetchHistory.feed(self.pk) - fh = fetch_history[exception_type + '_fetch_history'] - non_errors = [h for h in fh if h['status_code'] and int(h['status_code']) in (200, 304)] - errors = [h for h in fh if h['status_code'] and int(h['status_code']) not in (200, 304)] - + fh = fetch_history[exception_type + "_fetch_history"] + 
non_errors = [h for h in fh if h["status_code"] and int(h["status_code"]) in (200, 304)] + errors = [h for h in fh if h["status_code"] and int(h["status_code"]) not in (200, 304)] + if len(non_errors) == 0 and len(errors) > 1: self.active = True - if exception_type == 'feed': + if exception_type == "feed": self.has_feed_exception = True # self.active = False # No longer, just geometrically fetch - elif exception_type == 'page': + elif exception_type == "page": self.has_page_exception = True self.exception_code = status_code or int(errors[0]) self.save() elif self.exception_code > 0: self.active = True self.exception_code = 0 - if exception_type == 'feed': + if exception_type == "feed": self.has_feed_exception = False - elif exception_type == 'page': + elif exception_type == "page": self.has_page_exception = False self.save() - - logging.debug(' ---> [%-30s] ~FBCounting any errors in history: %s (%s non errors)' % - (self.log_title[:30], len(errors), len(non_errors))) - + + logging.debug( + " ---> [%-30s] ~FBCounting any errors in history: %s (%s non errors)" + % (self.log_title[:30], len(errors), len(non_errors)) + ) + return errors, non_errors - def count_redirects_in_history(self, fetch_type='feed', fetch_history=None): - logging.debug(' ---> [%-30s] Counting redirects in history...' % (self.log_title[:30])) + def count_redirects_in_history(self, fetch_type="feed", fetch_history=None): + logging.debug(" ---> [%-30s] Counting redirects in history..." % (self.log_title[:30])) if not fetch_history: fetch_history = MFetchHistory.feed(self.pk) - fh = fetch_history[fetch_type+'_fetch_history'] - redirects = [h for h in fh if h['status_code'] and int(h['status_code']) in (301, 302)] - non_redirects = [h for h in fh if h['status_code'] and int(h['status_code']) not in (301, 302)] - + fh = fetch_history[fetch_type + "_fetch_history"] + redirects = [h for h in fh if h["status_code"] and int(h["status_code"]) in (301, 302)] + non_redirects = [h for h in fh if h["status_code"] and int(h["status_code"]) not in (301, 302)] + return redirects, non_redirects - + @property def original_feed_id(self): if self.branch_from_feed: return self.branch_from_feed.pk else: return self.pk - + @property def counts_converted_to_redis(self): SUBSCRIBER_EXPIRE_DATE = datetime.datetime.now() - datetime.timedelta(days=settings.SUBSCRIBER_EXPIRE) - subscriber_expire = int(SUBSCRIBER_EXPIRE_DATE.strftime('%s')) + subscriber_expire = int(SUBSCRIBER_EXPIRE_DATE.strftime("%s")) r = redis.Redis(connection_pool=settings.REDIS_FEED_SUB_POOL) total_key = "s:%s" % self.original_feed_id premium_key = "sp:%s" % self.original_feed_id - last_recount = r.zscore(total_key, -1) # Need to subtract this extra when counting subs + last_recount = r.zscore(total_key, -1) # Need to subtract this extra when counting subs # Check for expired feeds with no active users who would have triggered a cleanup if last_recount and last_recount > subscriber_expire: return True elif last_recount: - logging.info(" ---> [%-30s] ~SN~FBFeed has expired redis subscriber counts (%s < %s), clearing..." % ( - self.log_title[:30], last_recount, subscriber_expire)) + logging.info( + " ---> [%-30s] ~SN~FBFeed has expired redis subscriber counts (%s < %s), clearing..." 
+ % (self.log_title[:30], last_recount, subscriber_expire) + ) r.delete(total_key, -1) r.delete(premium_key, -1) - + return False - + def count_subscribers(self, recount=True, verbose=False): if recount or not self.counts_converted_to_redis: from apps.profile.models import Profile + Profile.count_feed_subscribers(feed_id=self.pk) SUBSCRIBER_EXPIRE_DATE = datetime.datetime.now() - datetime.timedelta(days=settings.SUBSCRIBER_EXPIRE) - subscriber_expire = int(SUBSCRIBER_EXPIRE_DATE.strftime('%s')) - now = int(datetime.datetime.now().strftime('%s')) + subscriber_expire = int(SUBSCRIBER_EXPIRE_DATE.strftime("%s")) + now = int(datetime.datetime.now().strftime("%s")) r = redis.Redis(connection_pool=settings.REDIS_FEED_SUB_POOL) total = 0 active = 0 @@ -853,9 +892,9 @@ def count_subscribers(self, recount=True, verbose=False): archive = 0 pro = 0 active_premium = 0 - + # Include all branched feeds in counts - feed_ids = [f['id'] for f in Feed.objects.filter(branch_from_feed=self.original_feed_id).values('id')] + feed_ids = [f["id"] for f in Feed.objects.filter(branch_from_feed=self.original_feed_id).values("id")] feed_ids.append(self.original_feed_id) feed_ids = list(set(feed_ids)) @@ -863,21 +902,21 @@ def count_subscribers(self, recount=True, verbose=False): # For each branched feed, count different subscribers for feed_id in feed_ids: pipeline = r.pipeline() - + # now+1 ensures `-1` flag will be corrected for later with - 1 total_key = "s:%s" % feed_id premium_key = "sp:%s" % feed_id archive_key = "sarchive:%s" % feed_id pro_key = "spro:%s" % feed_id pipeline.zcard(total_key) - pipeline.zcount(total_key, subscriber_expire, now+1) + pipeline.zcount(total_key, subscriber_expire, now + 1) pipeline.zcard(premium_key) - pipeline.zcount(premium_key, subscriber_expire, now+1) + pipeline.zcount(premium_key, subscriber_expire, now + 1) pipeline.zcard(archive_key) pipeline.zcard(pro_key) results = pipeline.execute() - + # -1 due to counts_converted_to_redis using key=-1 for last_recount date total += max(0, results[0] - 1) active += max(0, results[1] - 1) @@ -885,64 +924,69 @@ def count_subscribers(self, recount=True, verbose=False): active_premium += max(0, results[3] - 1) archive += max(0, results[4] - 1) pro += max(0, results[5] - 1) - + original_num_subscribers = self.num_subscribers original_active_subs = self.active_subscribers original_premium_subscribers = self.premium_subscribers original_active_premium_subscribers = self.active_premium_subscribers original_archive_subscribers = self.archive_subscribers original_pro_subscribers = self.pro_subscribers - logging.info(" ---> [%-30s] ~SN~FBCounting subscribers from ~FCredis~FB: ~FMt:~SB~FM%s~SN a:~SB%s~SN p:~SB%s~SN ap:~SB%s~SN archive:~SB%s~SN pro:~SB%s ~SN~FC%s" % - (self.log_title[:30], total, active, premium, active_premium, archive, pro, "(%s branches)" % (len(feed_ids)-1) if len(feed_ids)>1 else "")) + logging.info( + " ---> [%-30s] ~SN~FBCounting subscribers from ~FCredis~FB: ~FMt:~SB~FM%s~SN a:~SB%s~SN p:~SB%s~SN ap:~SB%s~SN archive:~SB%s~SN pro:~SB%s ~SN~FC%s" + % ( + self.log_title[:30], + total, + active, + premium, + active_premium, + archive, + pro, + "(%s branches)" % (len(feed_ids) - 1) if len(feed_ids) > 1 else "", + ) + ) else: from apps.reader.models import UserSubscription - + subs = UserSubscription.objects.filter(feed__in=feed_ids) original_num_subscribers = self.num_subscribers total = subs.count() - + active_subs = UserSubscription.objects.filter( - feed__in=feed_ids, - active=True, - 
user__profile__last_seen_on__gte=SUBSCRIBER_EXPIRE_DATE + feed__in=feed_ids, active=True, user__profile__last_seen_on__gte=SUBSCRIBER_EXPIRE_DATE ) original_active_subs = self.active_subscribers active = active_subs.count() - + premium_subs = UserSubscription.objects.filter( - feed__in=feed_ids, - active=True, - user__profile__is_premium=True + feed__in=feed_ids, active=True, user__profile__is_premium=True ) original_premium_subscribers = self.premium_subscribers premium = premium_subs.count() - + archive_subs = UserSubscription.objects.filter( - feed__in=feed_ids, - active=True, - user__profile__is_archive=True + feed__in=feed_ids, active=True, user__profile__is_archive=True ) original_archive_subscribers = self.archive_subscribers archive = archive_subs.count() - + pro_subs = UserSubscription.objects.filter( - feed__in=feed_ids, - active=True, - user__profile__is_pro=True + feed__in=feed_ids, active=True, user__profile__is_pro=True ) original_pro_subscribers = self.pro_subscribers pro = pro_subs.count() - + active_premium_subscribers = UserSubscription.objects.filter( - feed__in=feed_ids, + feed__in=feed_ids, active=True, user__profile__is_premium=True, - user__profile__last_seen_on__gte=SUBSCRIBER_EXPIRE_DATE + user__profile__last_seen_on__gte=SUBSCRIBER_EXPIRE_DATE, ) original_active_premium_subscribers = self.active_premium_subscribers active_premium = active_premium_subscribers.count() - logging.debug(" ---> [%-30s] ~SN~FBCounting subscribers from ~FYpostgres~FB: ~FMt:~SB~FM%s~SN a:~SB%s~SN p:~SB%s~SN ap:~SB%s~SN archive:~SB%s~SN pro:~SB%s" % - (self.log_title[:30], total, active, premium, active_premium, archive, pro)) + logging.debug( + " ---> [%-30s] ~SN~FBCounting subscribers from ~FYpostgres~FB: ~FMt:~SB~FM%s~SN a:~SB%s~SN p:~SB%s~SN ap:~SB%s~SN archive:~SB%s~SN pro:~SB%s" + % (self.log_title[:30], total, active, premium, active_premium, archive, pro) + ) if settings.DOCKERBUILD: # Local installs enjoy 100% active feeds @@ -955,42 +999,55 @@ def count_subscribers(self, recount=True, verbose=False): self.active_premium_subscribers = active_premium self.archive_subscribers = archive self.pro_subscribers = pro - if (self.num_subscribers != original_num_subscribers or - self.active_subscribers != original_active_subs or - self.premium_subscribers != original_premium_subscribers or - self.active_premium_subscribers != original_active_premium_subscribers or - self.archive_subscribers != original_archive_subscribers or - self.pro_subscribers != original_pro_subscribers): + if ( + self.num_subscribers != original_num_subscribers + or self.active_subscribers != original_active_subs + or self.premium_subscribers != original_premium_subscribers + or self.active_premium_subscribers != original_active_premium_subscribers + or self.archive_subscribers != original_archive_subscribers + or self.pro_subscribers != original_pro_subscribers + ): if original_premium_subscribers == -1 or original_active_premium_subscribers == -1: self.save() else: - self.save(update_fields=['num_subscribers', 'active_subscribers', - 'premium_subscribers', 'active_premium_subscribers', - 'archive_subscribers', 'pro_subscribers']) - + self.save( + update_fields=[ + "num_subscribers", + "active_subscribers", + "premium_subscribers", + "active_premium_subscribers", + "archive_subscribers", + "pro_subscribers", + ] + ) + if verbose: if self.num_subscribers <= 1: print(".", end=" ") else: - print("\n %s> %s subscriber%s: %s" % ( - '-' * min(self.num_subscribers, 20), - self.num_subscribers, - '' if self.num_subscribers == 
1 else 's', - self.feed_title, - ), end=' ') - + print( + "\n %s> %s subscriber%s: %s" + % ( + "-" * min(self.num_subscribers, 20), + self.num_subscribers, + "" if self.num_subscribers == 1 else "s", + self.feed_title, + ), + end=" ", + ) + def _split_favicon_color(self, color=None): if not color: color = self.favicon_color if not color: return None, None, None - splitter = lambda s, p: [s[i:i+p] for i in range(0, len(s), p)] + splitter = lambda s, p: [s[i : i + p] for i in range(0, len(s), p)] red, green, blue = splitter(color[:6], 2) return red, green, blue - + def favicon_fade(self): return self.adjust_color(adjust=30) - + def adjust_color(self, color=None, adjust=0): red, green, blue = self._split_favicon_color(color=color) if red and green and blue: @@ -1002,11 +1059,11 @@ def adjust_color(self, color=None, adjust=0): def favicon_border(self): red, green, blue = self._split_favicon_color() if red and green and blue: - fade_red = hex(min(int(int(red, 16) * .75), 255))[2:].zfill(2) - fade_green = hex(min(int(int(green, 16) * .75), 255))[2:].zfill(2) - fade_blue = hex(min(int(int(blue, 16) * .75), 255))[2:].zfill(2) + fade_red = hex(min(int(int(red, 16) * 0.75), 255))[2:].zfill(2) + fade_green = hex(min(int(int(green, 16) * 0.75), 255))[2:].zfill(2) + fade_blue = hex(min(int(int(blue, 16) * 0.75), 255))[2:].zfill(2) return "%s%s%s" % (fade_red, fade_green, fade_blue) - + def favicon_text_color(self): # Color format: {r: 1, g: .5, b: 0} def contrast(color1, color2): @@ -1018,10 +1075,10 @@ def contrast(color1, color2): return (lum2 + 0.05) / (lum1 + 0.05) def luminosity(color): - r = color['red'] - g = color['green'] - b = color['blue'] - val = lambda c: c/12.92 if c <= 0.02928 else math.pow(((c + 0.055)/1.055), 2.4) + r = color["red"] + g = color["green"] + b = color["blue"] + val = lambda c: c / 12.92 if c <= 0.02928 else math.pow(((c + 0.055) / 1.055), 2.4) red = val(r) green = val(g) blue = val(b) @@ -1030,25 +1087,25 @@ def luminosity(color): red, green, blue = self._split_favicon_color() if red and green and blue: color = { - 'red': int(red, 16) / 256.0, - 'green': int(green, 16) / 256.0, - 'blue': int(blue, 16) / 256.0, + "red": int(red, 16) / 256.0, + "green": int(green, 16) / 256.0, + "blue": int(blue, 16) / 256.0, } white = { - 'red': 1, - 'green': 1, - 'blue': 1, + "red": 1, + "green": 1, + "blue": 1, } grey = { - 'red': 0.5, - 'green': 0.5, - 'blue': 0.5, + "red": 0.5, + "green": 0.5, + "blue": 0.5, } - + if contrast(color, white) > contrast(color, grey): - return 'white' + return "white" else: - return 'black' + return "black" def fill_out_archive_stories(self, force=False, starting_page=1): """ @@ -1058,33 +1115,34 @@ def fill_out_archive_stories(self, force=False, starting_page=1): before_story_count = MStory.objects(story_feed_id=self.pk).count() if not force and not self.archive_subscribers: - logging.debug(" ---> [%-30s] ~FBNot filling out archive stories, no archive subscribers" % ( - self.log_title[:30])) + logging.debug( + " ---> [%-30s] ~FBNot filling out archive stories, no archive subscribers" + % (self.log_title[:30]) + ) return before_story_count, before_story_count self.update(archive_page=starting_page) after_story_count = MStory.objects(story_feed_id=self.pk).count() - logging.debug(" ---> [%-30s] ~FCFilled out archive, ~FM~SB%s~SN new stories~FC, total of ~SB%s~SN stories" % ( - self.log_title[:30], - after_story_count - before_story_count, - after_story_count)) - + logging.debug( + " ---> [%-30s] ~FCFilled out archive, ~FM~SB%s~SN new stories~FC, total of 
~SB%s~SN stories" + % (self.log_title[:30], after_story_count - before_story_count, after_story_count) + ) + def save_feed_stories_last_month(self, verbose=False): month_ago = datetime.datetime.utcnow() - datetime.timedelta(days=30) - stories_last_month = MStory.objects(story_feed_id=self.pk, - story_date__gte=month_ago).count() + stories_last_month = MStory.objects(story_feed_id=self.pk, story_date__gte=month_ago).count() if self.stories_last_month != stories_last_month: self.stories_last_month = stories_last_month - self.save(update_fields=['stories_last_month']) - + self.save(update_fields=["stories_last_month"]) + if verbose: print(f" ---> {self.feed} [{self.pk}]: {self.stories_last_month} stories last month") - + def save_feed_story_history_statistics(self, current_counts=None): """ Fills in missing months between earlier occurances and now. - + Save format: [('YYYY-MM, #), ...] Example output: [(2010-12, 123), (2011-01, 146)] """ @@ -1096,7 +1154,7 @@ def save_feed_story_history_statistics(self, current_counts=None): current_counts = self.data.story_count_history and json.decode(self.data.story_count_history) if isinstance(current_counts, dict): - current_counts = current_counts['months'] + current_counts = current_counts["months"] if not current_counts: current_counts = [] @@ -1118,15 +1176,15 @@ def save_feed_story_history_statistics(self, current_counts=None): dates = defaultdict(int) hours = defaultdict(int) days = defaultdict(int) - results = MStory.objects(story_feed_id=self.pk).map_reduce(map_f, reduce_f, output='inline') + results = MStory.objects(story_feed_id=self.pk).map_reduce(map_f, reduce_f, output="inline") for result in results: - dates[result.value['month']] += 1 - hours[int(result.value['hour'])] += 1 - days[int(result.value['day'])] += 1 - year = int(re.findall(r"(\d{4})-\d{1,2}", result.value['month'])[0]) + dates[result.value["month"]] += 1 + hours[int(result.value["hour"])] += 1 + days[int(result.value["day"])] += 1 + year = int(re.findall(r"(\d{4})-\d{1,2}", result.value["month"])[0]) if year < min_year and year > 2000: min_year = year - + # Add on to existing months, always amending up, never down. (Current month # is guaranteed to be accurate, since trim_feeds won't delete it until after # a month. Hacker News can have 1,000+ and still be counted.) @@ -1136,38 +1194,37 @@ def save_feed_story_history_statistics(self, current_counts=None): dates[current_month] = current_count if year < min_year and year > 2000: min_year = year - - # Assemble a list with 0's filled in for missing months, + + # Assemble a list with 0's filled in for missing months, # trimming left and right 0's. 
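
# For reference: the map-reduce above lands in three defaultdict(int)
# buckets, stories per "year-month" key, per hour, and per day, which the
# loop just below zero-fills and trims. A plain-Python sketch of the same
# tallying (how the upstream JS derives hour/day is not shown here, so
# weekday() below is an assumption):

from collections import defaultdict

def tally_story_dates(story_dates):
    # story_dates: iterable of datetime objects
    dates, hours, days = defaultdict(int), defaultdict(int), defaultdict(int)
    for d in story_dates:
        dates["%s-%s" % (d.year, d.month)] += 1  # same key format as below
        hours[d.hour] += 1
        days[d.weekday()] += 1  # assumption: stands in for the JS day-of-week
    return dates, hours, days
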
months = [] start = False - for year in range(min_year, now.year+1): - for month in range(1, 12+1): + for year in range(min_year, now.year + 1): + for month in range(1, 12 + 1): if datetime.datetime(year, month, 1) < now: - key = '%s-%s' % (year, month) + key = "%s-%s" % (year, month) if dates.get(key) or start: start = True months.append((key, dates.get(key, 0))) total += dates.get(key, 0) if dates.get(key, 0) > 0: - month_count += 1 # Only count months that have stories for the average + month_count += 1 # Only count months that have stories for the average original_story_count_history = self.data.story_count_history - self.data.story_count_history = json.encode({'months': months, 'hours': hours, 'days': days}) + self.data.story_count_history = json.encode({"months": months, "hours": hours, "days": days}) if self.data.story_count_history != original_story_count_history: - self.data.save(update_fields=['story_count_history']) - + self.data.save(update_fields=["story_count_history"]) + original_average_stories_per_month = self.average_stories_per_month if not total or not month_count: self.average_stories_per_month = 0 else: self.average_stories_per_month = int(round(total / float(month_count))) if self.average_stories_per_month != original_average_stories_per_month: - self.save(update_fields=['average_stories_per_month']) - - + self.save(update_fields=["average_stories_per_month"]) + def save_classifier_counts(self): from apps.analyzer.models import MClassifierTitle, MClassifierAuthor, MClassifierFeed, MClassifierTag - + def calculate_scores(cls, facet): map_f = """ function() { @@ -1176,7 +1233,9 @@ def calculate_scores(cls, facet): neg: this.score<0 ? Math.abs(this.score) : 0 }); } - """ % (facet) + """ % ( + facet + ) reduce_f = """ function(key, values) { var result = {pos: 0, neg: 0}; @@ -1188,68 +1247,72 @@ def calculate_scores(cls, facet): } """ scores = [] - res = cls.objects(feed_id=self.pk).map_reduce(map_f, reduce_f, output='inline') + res = cls.objects(feed_id=self.pk).map_reduce(map_f, reduce_f, output="inline") for r in res: - facet_values = dict([(k, int(v)) for k,v in r.value.items()]) + facet_values = dict([(k, int(v)) for k, v in r.value.items()]) facet_values[facet] = r.key - if facet_values['pos'] + facet_values['neg'] >= 1: + if facet_values["pos"] + facet_values["neg"] >= 1: scores.append(facet_values) - scores = sorted(scores, key=lambda v: v['neg'] - v['pos']) + scores = sorted(scores, key=lambda v: v["neg"] - v["pos"]) return scores - + scores = {} - for cls, facet in [(MClassifierTitle, 'title'), - (MClassifierAuthor, 'author'), - (MClassifierTag, 'tag'), - (MClassifierFeed, 'feed_id')]: + for cls, facet in [ + (MClassifierTitle, "title"), + (MClassifierAuthor, "author"), + (MClassifierTag, "tag"), + (MClassifierFeed, "feed_id"), + ]: scores[facet] = calculate_scores(cls, facet) - if facet == 'feed_id' and scores[facet]: - scores['feed'] = scores[facet] - del scores['feed_id'] + if facet == "feed_id" and scores[facet]: + scores["feed"] = scores[facet] + del scores["feed_id"] elif not scores[facet]: del scores[facet] - + if scores: self.data.feed_classifier_counts = json.encode(scores) self.data.save() - + return scores - + @property def user_agent(self): feed_parts = urllib.parse.urlparse(self.feed_address) - if feed_parts.netloc.find('.tumblr.com') != -1: + if feed_parts.netloc.find(".tumblr.com") != -1: # Certain tumblr feeds will redirect to tumblr's login page when fetching. # A known workaround is using facebook's user agent. 
- return 'facebookexternalhit/1.0 (+http://www.facebook.com/externalhit_uatext.php)' + return "facebookexternalhit/1.0 (+http://www.facebook.com/externalhit_uatext.php)" - ua = ('NewsBlur Feed Fetcher - %s subscriber%s - %s %s' % ( - self.num_subscribers, - 's' if self.num_subscribers != 1 else '', - self.permalink, - self.fake_user_agent, - )) + ua = "NewsBlur Feed Fetcher - %s subscriber%s - %s %s" % ( + self.num_subscribers, + "s" if self.num_subscribers != 1 else "", + self.permalink, + self.fake_user_agent, + ) return ua - + @property def fake_user_agent(self): - ua = ('("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) ' - 'AppleWebKit/605.1.15 (KHTML, like Gecko) ' - 'Version/14.0.1 Safari/605.1.15")') - + ua = ( + '("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) ' + "AppleWebKit/605.1.15 (KHTML, like Gecko) " + 'Version/14.0.1 Safari/605.1.15")' + ) + return ua - + def fetch_headers(self, fake=False): headers = { - 'User-Agent': self.user_agent if not fake else self.fake_user_agent, - 'Accept': 'application/atom+xml, application/rss+xml, application/xml;q=0.8, text/xml;q=0.6, */*;q=0.2', - 'Accept-Encoding': 'gzip, deflate', + "User-Agent": self.user_agent if not fake else self.fake_user_agent, + "Accept": "application/atom+xml, application/rss+xml, application/xml;q=0.8, text/xml;q=0.6, */*;q=0.2", + "Accept-Encoding": "gzip, deflate", } - + return headers - + def update(self, **kwargs): try: from utils import feed_fetcher @@ -1260,24 +1323,24 @@ def update(self, **kwargs): original_feed_id = int(self.pk) options = { - 'verbose': kwargs.get('verbose'), - 'timeout': 10, - 'single_threaded': kwargs.get('single_threaded', True), - 'force': kwargs.get('force'), - 'force_fp': kwargs.get('force_fp'), - 'compute_scores': kwargs.get('compute_scores', True), - 'mongodb_replication_lag': kwargs.get('mongodb_replication_lag', None), - 'fake': kwargs.get('fake'), - 'quick': kwargs.get('quick'), - 'updates_off': kwargs.get('updates_off'), - 'debug': kwargs.get('debug'), - 'fpf': kwargs.get('fpf'), - 'feed_xml': kwargs.get('feed_xml'), - 'requesting_user_id': kwargs.get('requesting_user_id', None), - 'archive_page': kwargs.get('archive_page', None), + "verbose": kwargs.get("verbose"), + "timeout": 10, + "single_threaded": kwargs.get("single_threaded", True), + "force": kwargs.get("force"), + "force_fp": kwargs.get("force_fp"), + "compute_scores": kwargs.get("compute_scores", True), + "mongodb_replication_lag": kwargs.get("mongodb_replication_lag", None), + "fake": kwargs.get("fake"), + "quick": kwargs.get("quick"), + "updates_off": kwargs.get("updates_off"), + "debug": kwargs.get("debug"), + "fpf": kwargs.get("fpf"), + "feed_xml": kwargs.get("feed_xml"), + "requesting_user_id": kwargs.get("requesting_user_id", None), + "archive_page": kwargs.get("archive_page", None), } - - if getattr(settings, 'TEST_DEBUG', False) and "NEWSBLUR_DIR" in self.feed_address: + + if getattr(settings, "TEST_DEBUG", False) and "NEWSBLUR_DIR" in self.feed_address: print(" ---> Testing feed fetch: %s" % self.log_title) # options['force_fp'] = True # No, why would this be needed? 
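
# For reference: user_agent above advertises the subscriber count and
# permalink, falling back to a browser-like string for tumblr hosts. A
# compact sketch of that construction (the host check is simplified, and
# fake_browser_ua stands in for the fake_user_agent property):

def fetcher_user_agent(host, num_subscribers, permalink, fake_browser_ua):
    if ".tumblr.com" in host:
        # Tumblr redirects feed fetches to its login page; the known
        # workaround (per the comment above) is Facebook's crawler UA.
        return "facebookexternalhit/1.0 (+http://www.facebook.com/externalhit_uatext.php)"
    return "NewsBlur Feed Fetcher - %s subscriber%s - %s %s" % (
        num_subscribers,
        "s" if num_subscribers != 1 else "",
        permalink,
        fake_browser_ua,
    )
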
original_feed_address = self.feed_address @@ -1286,39 +1349,42 @@ def update(self, **kwargs): if self.feed_link: self.feed_link = self.feed_link.replace("%(NEWSBLUR_DIR)s", settings.NEWSBLUR_DIR) if self.feed_address != original_feed_address or self.feed_link != original_feed_link: - self.save(update_fields=['feed_address', 'feed_link']) - + self.save(update_fields=["feed_address", "feed_link"]) + if self.is_newsletter: feed = self.update_newsletter_icon() else: - disp = feed_fetcher.Dispatcher(options, 1) + disp = feed_fetcher.Dispatcher(options, 1) disp.add_jobs([[self.pk]]) feed = disp.run_jobs() - + if feed: feed = Feed.get_by_id(feed.pk) if feed: feed.last_update = datetime.datetime.utcnow() feed.set_next_scheduled_update(verbose=settings.DEBUG) - r.zadd('fetched_feeds_last_hour', { feed.pk: int(datetime.datetime.now().strftime('%s')) }) - + r.zadd("fetched_feeds_last_hour", {feed.pk: int(datetime.datetime.now().strftime("%s"))}) + if not feed or original_feed_id != feed.pk: - logging.info(" ---> ~FRFeed changed id, removing %s from tasked_feeds queue..." % original_feed_id) - r.zrem('tasked_feeds', original_feed_id) - r.zrem('error_feeds', original_feed_id) + logging.info( + " ---> ~FRFeed changed id, removing %s from tasked_feeds queue..." % original_feed_id + ) + r.zrem("tasked_feeds", original_feed_id) + r.zrem("error_feeds", original_feed_id) if feed: - r.zrem('tasked_feeds', feed.pk) - r.zrem('error_feeds', feed.pk) - + r.zrem("tasked_feeds", feed.pk) + r.zrem("error_feeds", feed.pk) + return feed - + def update_newsletter_icon(self): from apps.rss_feeds.icon_importer import IconImporter + icon_importer = IconImporter(self) icon_importer.save() - + return self - + @classmethod def get_by_id(cls, feed_id, feed_address=None): try: @@ -1333,41 +1399,43 @@ def get_by_id(cls, feed_id, feed_address=None): duplicate_feeds = DuplicateFeed.objects.filter(duplicate_address=feed_address) if duplicate_feeds: return duplicate_feeds[0].feed - + @classmethod def get_by_name(cls, query, limit=1): results = SearchFeed.query(query) feed_ids = [result.feed_id for result in results] - + if limit == 1: return Feed.get_by_id(feed_ids[0]) else: return [Feed.get_by_id(f) for f in feed_ids][:limit] - + def add_update_stories(self, stories, existing_stories, verbose=False, updates_off=False): ret_values = dict(new=0, updated=0, same=0, error=0) error_count = self.error_count - new_story_hashes = [s.get('story_hash') for s in stories] - + new_story_hashes = [s.get("story_hash") for s in stories] + if settings.DEBUG or verbose: - logging.debug(" ---> [%-30s] ~FBChecking ~SB%s~SN new/updated against ~SB%s~SN stories" % ( - self.log_title[:30], - len(stories), - len(list(existing_stories.keys())))) + logging.debug( + " ---> [%-30s] ~FBChecking ~SB%s~SN new/updated against ~SB%s~SN stories" + % (self.log_title[:30], len(stories), len(list(existing_stories.keys()))) + ) + @timelimit(5) def _1(story, story_content, existing_stories, new_story_hashes): - existing_story, story_has_changed = self._exists_story(story, story_content, - existing_stories, new_story_hashes) + existing_story, story_has_changed = self._exists_story( + story, story_content, existing_stories, new_story_hashes + ) return existing_story, story_has_changed - + for story in stories: if verbose: - logging.debug(" ---> [%-30s] ~FBChecking ~SB%s~SN / ~SB%s" % ( - self.log_title[:30], - story.get('title'), - story.get('guid'))) - - story_content = story.get('story_content') + logging.debug( + " ---> [%-30s] ~FBChecking ~SB%s~SN / ~SB%s" + % 
(self.log_title[:30], story.get("title"), story.get("guid")) + ) + + story_content = story.get("story_content") if error_count: story_content = strip_comments__lxml(story_content) else: @@ -1375,39 +1443,49 @@ def _1(story, story_content, existing_stories, new_story_hashes): story_tags = self.get_tags(story) story_link = self.get_permalink(story) replace_story_date = False - + try: - existing_story, story_has_changed = _1(story, story_content, - existing_stories, new_story_hashes) + existing_story, story_has_changed = _1( + story, story_content, existing_stories, new_story_hashes + ) except TimeoutError: - logging.debug(' ---> [%-30s] ~SB~FRExisting story check timed out...' % (self.log_title[:30])) + logging.debug( + " ---> [%-30s] ~SB~FRExisting story check timed out..." % (self.log_title[:30]) + ) existing_story = None story_has_changed = False - + if existing_story is None: if settings.DEBUG and False: - logging.debug(' ---> New story in feed (%s - %s): %s' % (self.feed_title, story.get('title'), len(story_content))) - - s = MStory(story_feed_id = self.pk, - story_date = story.get('published'), - story_title = story.get('title'), - story_content = story_content, - story_author_name = story.get('author'), - story_permalink = story_link, - story_guid = story.get('guid'), - story_tags = story_tags + logging.debug( + " ---> New story in feed (%s - %s): %s" + % (self.feed_title, story.get("title"), len(story_content)) + ) + + s = MStory( + story_feed_id=self.pk, + story_date=story.get("published"), + story_title=story.get("title"), + story_content=story_content, + story_author_name=story.get("author"), + story_permalink=story_link, + story_guid=story.get("guid"), + story_tags=story_tags, ) try: s.save() - ret_values['new'] += 1 + ret_values["new"] += 1 s.publish_to_subscribers() except (IntegrityError, OperationError) as e: - ret_values['error'] += 1 + ret_values["error"] += 1 if settings.DEBUG: - logging.info(' ---> [%-30s] ~SN~FRIntegrityError on new story: %s - %s' % (self.feed_title[:30], story.get('guid'), e)) + logging.info( + " ---> [%-30s] ~SN~FRIntegrityError on new story: %s - %s" + % (self.feed_title[:30], story.get("guid"), e) + ) if self.search_indexed: s.index_story_for_search() - elif existing_story and story_has_changed and not updates_off and ret_values['updated'] < 3: + elif existing_story and story_has_changed and not updates_off and ret_values["updated"] < 3: # update story original_content = None try: @@ -1415,19 +1493,22 @@ def _1(story, story_content, existing_stories, new_story_hashes): try: existing_story = MStory.objects.get(id=existing_story.id) except ValidationError: - existing_story, _ = MStory.find_story(existing_story.story_feed_id, - existing_story.id, - original_only=True) + existing_story, _ = MStory.find_story( + existing_story.story_feed_id, existing_story.id, original_only=True + ) elif existing_story and existing_story.story_hash: - existing_story, _ = MStory.find_story(existing_story.story_feed_id, - existing_story.story_hash, - original_only=True) + existing_story, _ = MStory.find_story( + existing_story.story_feed_id, existing_story.story_hash, original_only=True + ) else: raise MStory.DoesNotExist except (MStory.DoesNotExist, OperationError) as e: - ret_values['error'] += 1 + ret_values["error"] += 1 if verbose: - logging.info(' ---> [%-30s] ~SN~FROperation on existing story: %s - %s' % (self.feed_title[:30], story.get('guid'), e)) + logging.info( + " ---> [%-30s] ~SN~FROperation on existing story: %s - %s" + % (self.feed_title[:30], 
story.get("guid"), e) + ) continue if existing_story.story_original_content_z: original_content = zlib.decompress(existing_story.story_original_content_z) @@ -1445,60 +1526,71 @@ def _1(story, story_content, existing_stories, new_story_hashes): # logging.debug("\t\tDiff content: %s" % diff.getDiff()) # if existing_story.story_title != story.get('title'): # logging.debug('\tExisting title / New: : \n\t\t- %s\n\t\t- %s' % (existing_story.story_title, story.get('title'))) - if existing_story.story_hash != story.get('story_hash'): - self.update_story_with_new_guid(existing_story, story.get('guid')) + if existing_story.story_hash != story.get("story_hash"): + self.update_story_with_new_guid(existing_story, story.get("guid")) if verbose: - logging.debug('- Updated story in feed (%s - %s): %s / %s' % (self.feed_title, story.get('title'), len(story_content_diff), len(story_content))) - + logging.debug( + "- Updated story in feed (%s - %s): %s / %s" + % (self.feed_title, story.get("title"), len(story_content_diff), len(story_content)) + ) + existing_story.story_feed = self.pk - existing_story.story_title = story.get('title') + existing_story.story_title = story.get("title") existing_story.story_content = story_content_diff existing_story.story_latest_content = story_content existing_story.story_original_content = original_content - existing_story.story_author_name = story.get('author') + existing_story.story_author_name = story.get("author") existing_story.story_permalink = story_link - existing_story.story_guid = story.get('guid') + existing_story.story_guid = story.get("guid") existing_story.story_tags = story_tags - existing_story.original_text_z = None # Reset Text view cache + existing_story.original_text_z = None # Reset Text view cache # Do not allow publishers to change the story date once a story is published. # Leads to incorrect unread story counts. if replace_story_date: - existing_story.story_date = story.get('published') # Really shouldn't do this. + existing_story.story_date = story.get("published") # Really shouldn't do this. 
existing_story.extract_image_urls(force=True) try: existing_story.save() - ret_values['updated'] += 1 + ret_values["updated"] += 1 except (IntegrityError, OperationError): - ret_values['error'] += 1 + ret_values["error"] += 1 if verbose: - logging.info(' ---> [%-30s] ~SN~FRIntegrityError on updated story: %s' % (self.feed_title[:30], story.get('title')[:30])) + logging.info( + " ---> [%-30s] ~SN~FRIntegrityError on updated story: %s" + % (self.feed_title[:30], story.get("title")[:30]) + ) except ValidationError: - ret_values['error'] += 1 + ret_values["error"] += 1 if verbose: - logging.info(' ---> [%-30s] ~SN~FRValidationError on updated story: %s' % (self.feed_title[:30], story.get('title')[:30])) + logging.info( + " ---> [%-30s] ~SN~FRValidationError on updated story: %s" + % (self.feed_title[:30], story.get("title")[:30]) + ) if self.search_indexed: existing_story.index_story_for_search() else: - ret_values['same'] += 1 + ret_values["same"] += 1 if verbose: - logging.debug("Unchanged story (%s): %s / %s " % (story.get('story_hash'), story.get('guid'), story.get('title'))) - + logging.debug( + "Unchanged story (%s): %s / %s " + % (story.get("story_hash"), story.get("guid"), story.get("title")) + ) + return ret_values - + def update_story_with_new_guid(self, existing_story, new_story_guid): from apps.reader.models import RUserStory from apps.social.models import MSharedStory existing_story.remove_from_redis() existing_story.remove_from_search_index() - + old_hash = existing_story.story_hash new_hash = MStory.ensure_story_hash(new_story_guid, self.pk) RUserStory.switch_hash(feed=self, old_hash=old_hash, new_hash=new_hash) - - shared_stories = MSharedStory.objects.filter(story_feed_id=self.pk, - story_hash=old_hash) + + shared_stories = MSharedStory.objects.filter(story_feed_id=self.pk, story_hash=old_hash) for story in shared_stories: story.story_guid = new_story_guid story.story_hash = new_hash @@ -1507,18 +1599,19 @@ def update_story_with_new_guid(self, existing_story, new_story_guid): except NotUniqueError: # Story is already shared, skip. pass - + def save_popular_tags(self, feed_tags=None, verbose=False): if not feed_tags: - all_tags = MStory.objects(story_feed_id=self.pk, - story_tags__exists=True).item_frequencies('story_tags') - feed_tags = sorted([(k, v) for k, v in list(all_tags.items()) if int(v) > 0], - key=itemgetter(1), - reverse=True)[:25] + all_tags = MStory.objects(story_feed_id=self.pk, story_tags__exists=True).item_frequencies( + "story_tags" + ) + feed_tags = sorted( + [(k, v) for k, v in list(all_tags.items()) if int(v) > 0], key=itemgetter(1), reverse=True + )[:25] popular_tags = json.encode(feed_tags) if verbose: print("Found %s tags: %s" % (len(feed_tags), popular_tags)) - + # TODO: This len() bullshit will be gone when feeds move to mongo # On second thought, it might stay, because we don't want # popular tags the size of a small planet. 
I'm looking at you @@ -1526,7 +1619,7 @@ def save_popular_tags(self, feed_tags=None, verbose=False): if len(popular_tags) < 1024: if self.data.popular_tags != popular_tags: self.data.popular_tags = popular_tags - self.data.save(update_fields=['popular_tags']) + self.data.save(update_fields=["popular_tags"]) return tags_list = [] @@ -1534,21 +1627,21 @@ def save_popular_tags(self, feed_tags=None, verbose=False): tags_list = json.decode(feed_tags) if len(tags_list) >= 1: self.save_popular_tags(tags_list[:-1]) - + def save_popular_authors(self, feed_authors=None): if not feed_authors: authors = defaultdict(int) - for story in MStory.objects(story_feed_id=self.pk).only('story_author_name'): + for story in MStory.objects(story_feed_id=self.pk).only("story_author_name"): authors[story.story_author_name] += 1 - feed_authors = sorted([(k, v) for k, v in list(authors.items()) if k], - key=itemgetter(1), - reverse=True)[:20] + feed_authors = sorted( + [(k, v) for k, v in list(authors.items()) if k], key=itemgetter(1), reverse=True + )[:20] popular_authors = json.encode(feed_authors) if len(popular_authors) < 1023: if self.data.popular_authors != popular_authors: self.data.popular_authors = popular_authors - self.data.save(update_fields=['popular_authors']) + self.data.save(update_fields=["popular_authors"]) return if len(feed_authors) > 1: @@ -1558,19 +1651,24 @@ def save_popular_authors(self, feed_authors=None): def trim_old_stories(cls, start=0, verbose=True, dryrun=False, total=0, end=None): now = datetime.datetime.now() month_ago = now - datetime.timedelta(days=settings.DAYS_OF_STORY_HASHES) - feed_count = end or Feed.objects.latest('pk').pk + feed_count = end or Feed.objects.latest("pk").pk for feed_id in range(start, feed_count): if feed_id % 1000 == 0: - print("\n\n -------------------------- %s (%s deleted so far) --------------------------\n\n" % (feed_id, total)) + print( + "\n\n -------------------------- %s (%s deleted so far) --------------------------\n\n" + % (feed_id, total) + ) try: feed = Feed.objects.get(pk=feed_id) except Feed.DoesNotExist: continue # Ensure only feeds with no active subscribers are being trimmed - if (feed.active_subscribers <= 0 and - (not feed.archive_subscribers or feed.archive_subscribers <= 0) and - (not feed.last_story_date or feed.last_story_date < month_ago)): + if ( + feed.active_subscribers <= 0 + and (not feed.archive_subscribers or feed.archive_subscribers <= 0) + and (not feed.last_story_date or feed.last_story_date < month_ago) + ): # 1 month since last story = keep 5 stories, >6 months since, only keep 1 story months_ago = 6 if feed.last_story_date: @@ -1585,18 +1683,17 @@ def trim_old_stories(cls, start=0, verbose=True, dryrun=False, total=0, end=None print(" DRYRUN: %s/%s cutoff - %s" % (cutoff, feed.story_cutoff, feed)) else: total += feed.trim_feed(verbose=verbose) - - + print(" ---> Deleted %s stories in total." 
% total) - + @property def story_cutoff(self): return self.number_of_stories_to_store() - + def number_of_stories_to_store(self, pre_archive=False): if self.archive_subscribers and self.archive_subscribers > 0 and not pre_archive: return 10000 - + cutoff = 500 if self.active_subscribers <= 0: cutoff = 25 @@ -1612,21 +1709,25 @@ def number_of_stories_to_store(self, pre_archive=False): cutoff = 400 elif self.active_premium_subscribers <= 20: cutoff = 450 - + if self.active_subscribers and self.average_stories_per_month < 5 and self.stories_last_month < 5: cutoff /= 2 - if self.active_premium_subscribers <= 1 and self.average_stories_per_month <= 1 and self.stories_last_month <= 1: + if ( + self.active_premium_subscribers <= 1 + and self.average_stories_per_month <= 1 + and self.stories_last_month <= 1 + ): cutoff /= 2 - + r = redis.Redis(connection_pool=settings.REDIS_FEED_READ_POOL) pipeline = r.pipeline() read_stories_per_week = [] now = datetime.datetime.now() # Check to see how many stories have been read each week since the feed's days of story hashes - for weeks_back in range(2*int(math.floor(settings.DAYS_OF_STORY_HASHES/7))): - weeks_ago = now - datetime.timedelta(days=7*weeks_back) - week_of_year = weeks_ago.strftime('%Y-%U') + for weeks_back in range(2 * int(math.floor(settings.DAYS_OF_STORY_HASHES / 7))): + weeks_ago = now - datetime.timedelta(days=7 * weeks_back) + week_of_year = weeks_ago.strftime("%Y-%U") feed_read_key = "fR:%s:%s" % (self.pk, week_of_year) pipeline.get(feed_read_key) read_stories_per_week = pipeline.execute() @@ -1635,16 +1736,26 @@ def number_of_stories_to_store(self, pre_archive=False): original_cutoff = cutoff cutoff = min(cutoff, 10) try: - logging.debug(" ---> [%-30s] ~FBTrimming down to ~SB%s (instead of %s)~SN stories (~FM%s~FB)" % (self.log_title[:30], cutoff, original_cutoff, self.last_story_date.strftime("%Y-%m-%d") if self.last_story_date else "No last story date")) + logging.debug( + " ---> [%-30s] ~FBTrimming down to ~SB%s (instead of %s)~SN stories (~FM%s~FB)" + % ( + self.log_title[:30], + cutoff, + original_cutoff, + self.last_story_date.strftime("%Y-%m-%d") + if self.last_story_date + else "No last story date", + ) + ) except ValueError as e: logging.debug(" ***> [%-30s] Error trimming: %s" % (self.log_title[:30], e)) pass - - if getattr(settings, 'OVERRIDE_STORY_COUNT_MAX', None): + + if getattr(settings, "OVERRIDE_STORY_COUNT_MAX", None): cutoff = settings.OVERRIDE_STORY_COUNT_MAX - + return int(cutoff) - + def trim_feed(self, verbose=False, cutoff=None): if not cutoff: cutoff = self.story_cutoff @@ -1664,21 +1775,25 @@ def count_fs_size_bytes(self): for story in stories: count += 1 story_with_content = story.to_mongo() - if story_with_content.get('story_content_z', None): - story_with_content['story_content'] = zlib.decompress(story_with_content['story_content_z']) - del story_with_content['story_content_z'] - if story_with_content.get('original_page_z', None): - story_with_content['original_page'] = zlib.decompress(story_with_content['original_page_z']) - del story_with_content['original_page_z'] - if story_with_content.get('original_text_z', None): - story_with_content['original_text'] = zlib.decompress(story_with_content['original_text_z']) - del story_with_content['original_text_z'] - if story_with_content.get('story_latest_content_z', None): - story_with_content['story_latest_content'] = zlib.decompress(story_with_content['story_latest_content_z']) - del story_with_content['story_latest_content_z'] - if 
story_with_content.get('story_original_content_z', None): - story_with_content['story_original_content'] = zlib.decompress(story_with_content['story_original_content_z']) - del story_with_content['story_original_content_z'] + if story_with_content.get("story_content_z", None): + story_with_content["story_content"] = zlib.decompress(story_with_content["story_content_z"]) + del story_with_content["story_content_z"] + if story_with_content.get("original_page_z", None): + story_with_content["original_page"] = zlib.decompress(story_with_content["original_page_z"]) + del story_with_content["original_page_z"] + if story_with_content.get("original_text_z", None): + story_with_content["original_text"] = zlib.decompress(story_with_content["original_text_z"]) + del story_with_content["original_text_z"] + if story_with_content.get("story_latest_content_z", None): + story_with_content["story_latest_content"] = zlib.decompress( + story_with_content["story_latest_content_z"] + ) + del story_with_content["story_latest_content_z"] + if story_with_content.get("story_original_content_z", None): + story_with_content["story_original_content"] = zlib.decompress( + story_with_content["story_original_content_z"] + ) + del story_with_content["story_original_content_z"] sum_bytes += len(bson.BSON.encode(story_with_content)) self.fs_size_bytes = sum_bytes @@ -1686,7 +1801,7 @@ def count_fs_size_bytes(self): self.save() return sum_bytes - + def purge_feed_stories(self, update=True): MStory.purge_feed_stories(feed=self, cutoff=self.story_cutoff) if update: @@ -1695,15 +1810,21 @@ def purge_feed_stories(self, update=True): def purge_author(self, author): all_stories = MStory.objects.filter(story_feed_id=self.pk) author_stories = MStory.objects.filter(story_feed_id=self.pk, story_author_name__iexact=author) - logging.debug(" ---> Deleting %s of %s stories in %s by '%s'." % (author_stories.count(), all_stories.count(), self, author)) + logging.debug( + " ---> Deleting %s of %s stories in %s by '%s'." + % (author_stories.count(), all_stories.count(), self, author) + ) author_stories.delete() def purge_tag(self, tag): all_stories = MStory.objects.filter(story_feed_id=self.pk) tagged_stories = MStory.objects.filter(story_feed_id=self.pk, story_tags__icontains=tag) - logging.debug(" ---> Deleting %s of %s stories in %s by '%s'." % (tagged_stories.count(), all_stories.count(), self, tag)) + logging.debug( + " ---> Deleting %s of %s stories in %s by '%s'." + % (tagged_stories.count(), all_stories.count(), self, tag) + ) tagged_stories.delete() - + # @staticmethod # def clean_invalid_ids(): # history = MFeedFetchHistory.objects(status_code=500, exception__contains='InvalidId:') @@ -1711,43 +1832,42 @@ def purge_tag(self, tag): # for h in history: # u = re.split('InvalidId: (.*?) 
is not a valid ObjectId\\n$', h.exception)[1] # urls.add((h.feed_id, u)) - # + # # for f, u in urls: # print "db.stories.remove({\"story_feed_id\": %s, \"_id\": \"%s\"})" % (f, u) - def get_stories(self, offset=0, limit=25, order="newest", force=False): if order == "newest": - stories_db = MStory.objects(story_feed_id=self.pk)[offset:offset+limit] + stories_db = MStory.objects(story_feed_id=self.pk)[offset : offset + limit] elif order == "oldest": - stories_db = MStory.objects(story_feed_id=self.pk).order_by('story_date')[offset:offset+limit] + stories_db = MStory.objects(story_feed_id=self.pk).order_by("story_date")[offset : offset + limit] stories = self.format_stories(stories_db, self.pk) - + return stories - + @classmethod def find_feed_stories(cls, feed_ids, query, order="newest", offset=0, limit=25): - story_ids = SearchStory.query(feed_ids=feed_ids, query=query, order=order, - offset=offset, limit=limit) - stories_db = MStory.objects( - story_hash__in=story_ids - ).order_by('-story_date' if order == "newest" else 'story_date') + story_ids = SearchStory.query(feed_ids=feed_ids, query=query, order=order, offset=offset, limit=limit) + stories_db = MStory.objects(story_hash__in=story_ids).order_by( + "-story_date" if order == "newest" else "story_date" + ) stories = cls.format_stories(stories_db) - + return stories - + @classmethod - def query_popularity(cls, query, limit, order='newest'): + def query_popularity(cls, query, limit, order="newest"): popularity = {} seen_feeds = set() feed_title_to_id = dict() - + # Collect stories, sort by feed story_ids = SearchStory.global_query(query, order=order, offset=0, limit=limit) for story_hash in story_ids: feed_id, story_id = MStory.split_story_hash(story_hash) feed = Feed.get_by_id(feed_id) - if not feed: continue + if not feed: + continue if feed.feed_title in seen_feeds: feed_id = feed_title_to_id[feed.feed_title] else: @@ -1758,250 +1878,349 @@ def query_popularity(cls, query, limit, order='newest'): # classifiers = feed.save_classifier_counts() well_read_score = feed.well_read_score() popularity[feed_id] = { - 'feed_title': feed.feed_title, - 'feed_url': feed.feed_link, - 'num_subscribers': feed.num_subscribers, - 'feed_id': feed.pk, - 'story_ids': [], - 'authors': {}, - 'read_pct': well_read_score['read_pct'], - 'reader_count': well_read_score['reader_count'], - 'story_count': well_read_score['story_count'], - 'reach_score': well_read_score['reach_score'], - 'share_count': well_read_score['share_count'], - 'ps': 0, - 'ng': 0, - 'classifiers': json.decode(feed.data.feed_classifier_counts), + "feed_title": feed.feed_title, + "feed_url": feed.feed_link, + "num_subscribers": feed.num_subscribers, + "feed_id": feed.pk, + "story_ids": [], + "authors": {}, + "read_pct": well_read_score["read_pct"], + "reader_count": well_read_score["reader_count"], + "story_count": well_read_score["story_count"], + "reach_score": well_read_score["reach_score"], + "share_count": well_read_score["share_count"], + "ps": 0, + "ng": 0, + "classifiers": json.decode(feed.data.feed_classifier_counts), } - if popularity[feed_id]['classifiers']: - for classifier in popularity[feed_id]['classifiers'].get('feed', []): - if int(classifier['feed_id']) == int(feed_id): - popularity[feed_id]['ps'] = classifier['pos'] - popularity[feed_id]['ng'] = -1 * classifier['neg'] - popularity[feed_id]['story_ids'].append(story_hash) - - sorted_popularity = sorted(list(popularity.values()), key=lambda x: x['reach_score'], - reverse=True) - + if popularity[feed_id]["classifiers"]: + for 
classifier in popularity[feed_id]["classifiers"].get("feed", []): + if int(classifier["feed_id"]) == int(feed_id): + popularity[feed_id]["ps"] = classifier["pos"] + popularity[feed_id]["ng"] = -1 * classifier["neg"] + popularity[feed_id]["story_ids"].append(story_hash) + + sorted_popularity = sorted(list(popularity.values()), key=lambda x: x["reach_score"], reverse=True) + # Extract story authors from feeds for feed in sorted_popularity: - story_ids = feed['story_ids'] + story_ids = feed["story_ids"] stories_db = MStory.objects(story_hash__in=story_ids) stories = cls.format_stories(stories_db) for story in stories: - story['story_permalink'] = story['story_permalink'][:250] - if story['story_authors'] not in feed['authors']: - feed['authors'][story['story_authors']] = { - 'name': story['story_authors'], - 'count': 0, - 'ps': 0, - 'ng': 0, - 'tags': {}, - 'stories': [], + story["story_permalink"] = story["story_permalink"][:250] + if story["story_authors"] not in feed["authors"]: + feed["authors"][story["story_authors"]] = { + "name": story["story_authors"], + "count": 0, + "ps": 0, + "ng": 0, + "tags": {}, + "stories": [], } - author = feed['authors'][story['story_authors']] + author = feed["authors"][story["story_authors"]] seen = False - for seen_story in author['stories']: - if seen_story['url'] == story['story_permalink']: + for seen_story in author["stories"]: + if seen_story["url"] == story["story_permalink"]: seen = True break else: - author['stories'].append({ - 'title': story['story_title'], - 'url': story['story_permalink'], - 'date': story['story_date'], - }) - author['count'] += 1 - if seen: continue # Don't recount tags - - if feed['classifiers']: - for classifier in feed['classifiers'].get('author', []): - if classifier['author'] == author['name']: - author['ps'] = classifier['pos'] - author['ng'] = -1 * classifier['neg'] - - for tag in story['story_tags']: - if tag not in author['tags']: - author['tags'][tag] = {'name': tag, 'count': 0, 'ps': 0, 'ng': 0} - author['tags'][tag]['count'] += 1 - if feed['classifiers']: - for classifier in feed['classifiers'].get('tag', []): - if classifier['tag'] == tag: - author['tags'][tag]['ps'] = classifier['pos'] - author['tags'][tag]['ng'] = -1 * classifier['neg'] - - sorted_authors = sorted(list(feed['authors'].values()), key=lambda x: x['count']) - feed['authors'] = sorted_authors - + author["stories"].append( + { + "title": story["story_title"], + "url": story["story_permalink"], + "date": story["story_date"], + } + ) + author["count"] += 1 + if seen: + continue # Don't recount tags + + if feed["classifiers"]: + for classifier in feed["classifiers"].get("author", []): + if classifier["author"] == author["name"]: + author["ps"] = classifier["pos"] + author["ng"] = -1 * classifier["neg"] + + for tag in story["story_tags"]: + if tag not in author["tags"]: + author["tags"][tag] = {"name": tag, "count": 0, "ps": 0, "ng": 0} + author["tags"][tag]["count"] += 1 + if feed["classifiers"]: + for classifier in feed["classifiers"].get("tag", []): + if classifier["tag"] == tag: + author["tags"][tag]["ps"] = classifier["pos"] + author["tags"][tag]["ng"] = -1 * classifier["neg"] + + sorted_authors = sorted(list(feed["authors"].values()), key=lambda x: x["count"]) + feed["authors"] = sorted_authors + # pprint(sorted_popularity) return sorted_popularity - + def well_read_score(self): """Average percentage of stories read vs published across recently active subscribers""" from apps.reader.models import UserSubscription from apps.social.models import 
MSharedStory - + r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) p = r.pipeline() - + shared_stories = MSharedStory.objects(story_feed_id=self.pk).count() - - subscribing_users = UserSubscription.objects.filter(feed_id=self.pk).values('user_id') - subscribing_user_ids = [sub['user_id'] for sub in subscribing_users] - + + subscribing_users = UserSubscription.objects.filter(feed_id=self.pk).values("user_id") + subscribing_user_ids = [sub["user_id"] for sub in subscribing_users] + for user_id in subscribing_user_ids: user_rs = "RS:%s:%s" % (user_id, self.pk) p.scard(user_rs) - + counts = p.execute() counts = [c for c in counts if c > 0] reader_count = len(counts) - - now = datetime.datetime.now().strftime('%s') - unread_cutoff = self.unread_cutoff.strftime('%s') + + now = datetime.datetime.now().strftime("%s") + unread_cutoff = self.unread_cutoff.strftime("%s") story_count = len(r.zrangebyscore("zF:%s" % self.pk, max=now, min=unread_cutoff)) if reader_count and story_count: average_pct = (sum(counts) / float(reader_count)) / float(story_count) else: average_pct = 0 - + reach_score = average_pct * reader_count * story_count - - return {'read_pct': average_pct, 'reader_count': reader_count, - 'reach_score': reach_score, 'story_count': story_count, - 'share_count': shared_stories} - + + return { + "read_pct": average_pct, + "reader_count": reader_count, + "reach_score": reach_score, + "story_count": story_count, + "share_count": shared_stories, + } + @classmethod def xls_query_popularity(cls, queries, limit): import xlsxwriter from xlsxwriter.utility import xl_rowcol_to_cell if isinstance(queries, str): - queries = [q.strip() for q in queries.split(',')] - - title = 'NewsBlur-%s.xlsx' % slugify('-'.join(queries)) + queries = [q.strip() for q in queries.split(",")] + + title = "NewsBlur-%s.xlsx" % slugify("-".join(queries)) workbook = xlsxwriter.Workbook(title) - bold = workbook.add_format({'bold': 1}) - date_format = workbook.add_format({'num_format': 'mmm d yyyy'}) - unread_format = workbook.add_format({'font_color': '#E0E0E0'}) - + bold = workbook.add_format({"bold": 1}) + date_format = workbook.add_format({"num_format": "mmm d yyyy"}) + unread_format = workbook.add_format({"font_color": "#E0E0E0"}) + for query in queries: worksheet = workbook.add_worksheet(query) row = 1 col = 0 - worksheet.write(0, col, 'Publisher', bold) - worksheet.set_column(col, col, 15); col += 1 - worksheet.write(0, col, 'Feed URL', bold) - worksheet.set_column(col, col, 20); col += 1 - worksheet.write(0, col, 'Reach score', bold) - worksheet.write_comment(0, col, 'Feeds are sorted based on this score. It\'s simply the # of readers * # of stories in the past 30 days * the percentage of stories that are actually read.') - worksheet.set_column(col, col, 9); col += 1 - worksheet.write(0, col, '# subs', bold) - worksheet.write_comment(0, col, 'Total number of subscribers on NewsBlur, not necessarily active') - worksheet.set_column(col, col, 5); col += 1 - worksheet.write(0, col, '# readers', bold) - worksheet.write_comment(0, col, 'Total number of active subscribers who have read a story from the feed in the past 30 days.') - worksheet.set_column(col, col, 8); col += 1 + worksheet.write(0, col, "Publisher", bold) + worksheet.set_column(col, col, 15) + col += 1 + worksheet.write(0, col, "Feed URL", bold) + worksheet.set_column(col, col, 20) + col += 1 + worksheet.write(0, col, "Reach score", bold) + worksheet.write_comment( + 0, + col, + "Feeds are sorted based on this score. 
It's simply the # of readers * # of stories in the past 30 days * the percentage of stories that are actually read.", + ) + worksheet.set_column(col, col, 9) + col += 1 + worksheet.write(0, col, "# subs", bold) + worksheet.write_comment(0, col, "Total number of subscribers on NewsBlur, not necessarily active") + worksheet.set_column(col, col, 5) + col += 1 + worksheet.write(0, col, "# readers", bold) + worksheet.write_comment( + 0, + col, + "Total number of active subscribers who have read a story from the feed in the past 30 days.", + ) + worksheet.set_column(col, col, 8) + col += 1 worksheet.write(0, col, "read pct", bold) - worksheet.write_comment(0, col, "Of the active subscribers reading this feed in the past 30 days, this is the percentage of stories the average subscriber reads. Values over 100 pct signify that the feed has many shared stories, which throws off the number slightly but not significantly.") - worksheet.set_column(col, col, 8); col += 1 - worksheet.write(0, col, '# stories 30d', bold) - worksheet.write_comment(0, col, "It's important to ignore feeds that haven't published anything in the last 30 days, which is why this is part of the Reach Score.") - worksheet.set_column(col, col, 10); col += 1 - worksheet.write(0, col, '# shared', bold) - worksheet.write_comment(0, col, 'Number of stories from this feed that were shared on NewsBlur. This is a strong signal of interest although it is not included in the Reach Score.') - worksheet.set_column(col, col, 7); col += 1 - worksheet.write(0, col, '# feed pos', bold) - worksheet.write_comment(0, col, 'Number of times this feed was trained with a thumbs up. Users use training to hide stories they don\'t want to see while highlighting those that they do.') - worksheet.set_column(col, col, 8); col += 1 - worksheet.write(0, col, '# feed neg', bold) - worksheet.write_comment(0, col, 'Number of times this feed was trained with a thumbs down. Users use training to hide stories they don\'t want to see while highlighting those that they do.') - worksheet.set_column(col, col, 8); col += 1 - worksheet.write(0, col, 'Author', bold) - worksheet.set_column(col, col, 15); col += 1 - worksheet.write(0, col, '# author pos', bold) - worksheet.write_comment(0, col, 'Number of times this author was trained with a thumbs up. Users use training to hide stories they don\'t want to see while highlighting those that they do.') - worksheet.set_column(col, col, 10); col += 1 - worksheet.write(0, col, '# author neg', bold) - worksheet.write_comment(0, col, 'Number of times this author was trained with a thumbs down. Users use training to hide stories they don\'t want to see while highlighting those that they do.') - worksheet.set_column(col, col, 10); col += 1 - worksheet.write(0, col, 'Story title', bold) - worksheet.set_column(col, col, 30); col += 1 - worksheet.write(0, col, 'Story URL', bold) - worksheet.set_column(col, col, 20); col += 1 - worksheet.write(0, col, 'Story date', bold) - worksheet.set_column(col, col, 10); col += 1 - worksheet.write(0, col, 'Tag', bold) - worksheet.set_column(col, col, 15); col += 1 - worksheet.write(0, col, 'Tag count', bold) - worksheet.write_comment(0, col, 'Number of times this tag is used in other stories that also contain the search query.') - worksheet.set_column(col, col, 8); col += 1 - worksheet.write(0, col, '# tag pos', bold) - worksheet.write_comment(0, col, 'Number of times this tag was trained with a thumbs up. 
Users use training to hide stories they don\'t want to see while highlighting those that they do.') - worksheet.set_column(col, col, 7); col += 1 - worksheet.write(0, col, '# tag neg', bold) - worksheet.write_comment(0, col, 'Number of times this tag was trained with a thumbs down. Users use training to hide stories they don\'t want to see while highlighting those that they do.') - worksheet.set_column(col, col, 7); col += 1 + worksheet.write_comment( + 0, + col, + "Of the active subscribers reading this feed in the past 30 days, this is the percentage of stories the average subscriber reads. Values over 100 pct signify that the feed has many shared stories, which throws off the number slightly but not significantly.", + ) + worksheet.set_column(col, col, 8) + col += 1 + worksheet.write(0, col, "# stories 30d", bold) + worksheet.write_comment( + 0, + col, + "It's important to ignore feeds that haven't published anything in the last 30 days, which is why this is part of the Reach Score.", + ) + worksheet.set_column(col, col, 10) + col += 1 + worksheet.write(0, col, "# shared", bold) + worksheet.write_comment( + 0, + col, + "Number of stories from this feed that were shared on NewsBlur. This is a strong signal of interest although it is not included in the Reach Score.", + ) + worksheet.set_column(col, col, 7) + col += 1 + worksheet.write(0, col, "# feed pos", bold) + worksheet.write_comment( + 0, + col, + "Number of times this feed was trained with a thumbs up. Users use training to hide stories they don't want to see while highlighting those that they do.", + ) + worksheet.set_column(col, col, 8) + col += 1 + worksheet.write(0, col, "# feed neg", bold) + worksheet.write_comment( + 0, + col, + "Number of times this feed was trained with a thumbs down. Users use training to hide stories they don't want to see while highlighting those that they do.", + ) + worksheet.set_column(col, col, 8) + col += 1 + worksheet.write(0, col, "Author", bold) + worksheet.set_column(col, col, 15) + col += 1 + worksheet.write(0, col, "# author pos", bold) + worksheet.write_comment( + 0, + col, + "Number of times this author was trained with a thumbs up. Users use training to hide stories they don't want to see while highlighting those that they do.", + ) + worksheet.set_column(col, col, 10) + col += 1 + worksheet.write(0, col, "# author neg", bold) + worksheet.write_comment( + 0, + col, + "Number of times this author was trained with a thumbs down. Users use training to hide stories they don't want to see while highlighting those that they do.", + ) + worksheet.set_column(col, col, 10) + col += 1 + worksheet.write(0, col, "Story title", bold) + worksheet.set_column(col, col, 30) + col += 1 + worksheet.write(0, col, "Story URL", bold) + worksheet.set_column(col, col, 20) + col += 1 + worksheet.write(0, col, "Story date", bold) + worksheet.set_column(col, col, 10) + col += 1 + worksheet.write(0, col, "Tag", bold) + worksheet.set_column(col, col, 15) + col += 1 + worksheet.write(0, col, "Tag count", bold) + worksheet.write_comment( + 0, + col, + "Number of times this tag is used in other stories that also contain the search query.", + ) + worksheet.set_column(col, col, 8) + col += 1 + worksheet.write(0, col, "# tag pos", bold) + worksheet.write_comment( + 0, + col, + "Number of times this tag was trained with a thumbs up. 
Users use training to hide stories they don't want to see while highlighting those that they do.", + ) + worksheet.set_column(col, col, 7) + col += 1 + worksheet.write(0, col, "# tag neg", bold) + worksheet.write_comment( + 0, + col, + "Number of times this tag was trained with a thumbs down. Users use training to hide stories they don't want to see while highlighting those that they do.", + ) + worksheet.set_column(col, col, 7) + col += 1 popularity = cls.query_popularity(query, limit=limit) - + for feed in popularity: col = 0 - worksheet.write(row, col, feed['feed_title']); col += 1 - worksheet.write_url(row, col, feed.get('feed_url') or ""); col += 1 - worksheet.conditional_format(row, col, row, col+8, {'type': 'cell', - 'criteria': '==', - 'value': 0, - 'format': unread_format}) - worksheet.write(row, col, "=%s*%s*%s" % ( - xl_rowcol_to_cell(row, col+2), - xl_rowcol_to_cell(row, col+3), - xl_rowcol_to_cell(row, col+4), - )); col += 1 - worksheet.write(row, col, feed['num_subscribers']); col += 1 - worksheet.write(row, col, feed['reader_count']); col += 1 - worksheet.write(row, col, feed['read_pct']); col += 1 - worksheet.write(row, col, feed['story_count']); col += 1 - worksheet.write(row, col, feed['share_count']); col += 1 - worksheet.write(row, col, feed['ps']); col += 1 - worksheet.write(row, col, feed['ng']); col += 1 - for author in feed['authors']: + worksheet.write(row, col, feed["feed_title"]) + col += 1 + worksheet.write_url(row, col, feed.get("feed_url") or "") + col += 1 + worksheet.conditional_format( + row, + col, + row, + col + 8, + {"type": "cell", "criteria": "==", "value": 0, "format": unread_format}, + ) + worksheet.write( + row, + col, + "=%s*%s*%s" + % ( + xl_rowcol_to_cell(row, col + 2), + xl_rowcol_to_cell(row, col + 3), + xl_rowcol_to_cell(row, col + 4), + ), + ) + col += 1 + worksheet.write(row, col, feed["num_subscribers"]) + col += 1 + worksheet.write(row, col, feed["reader_count"]) + col += 1 + worksheet.write(row, col, feed["read_pct"]) + col += 1 + worksheet.write(row, col, feed["story_count"]) + col += 1 + worksheet.write(row, col, feed["share_count"]) + col += 1 + worksheet.write(row, col, feed["ps"]) + col += 1 + worksheet.write(row, col, feed["ng"]) + col += 1 + for author in feed["authors"]: row += 1 - worksheet.conditional_format(row, col, row, col+2, {'type': 'cell', - 'criteria': '==', - 'value': 0, - 'format': unread_format}) - worksheet.write(row, col, author['name']) - worksheet.write(row, col+1, author['ps']) - worksheet.write(row, col+2, author['ng']) - for story in author['stories']: - worksheet.write(row, col+3, story['title']) - worksheet.write_url(row, col+4, story['url']) - worksheet.write_datetime(row, col+5, story['date'], date_format) + worksheet.conditional_format( + row, + col, + row, + col + 2, + {"type": "cell", "criteria": "==", "value": 0, "format": unread_format}, + ) + worksheet.write(row, col, author["name"]) + worksheet.write(row, col + 1, author["ps"]) + worksheet.write(row, col + 2, author["ng"]) + for story in author["stories"]: + worksheet.write(row, col + 3, story["title"]) + worksheet.write_url(row, col + 4, story["url"]) + worksheet.write_datetime(row, col + 5, story["date"], date_format) row += 1 - for tag in list(author['tags'].values()): - worksheet.conditional_format(row, col+7, row, col+9, {'type': 'cell', - 'criteria': '==', - 'value': 0, - 'format': unread_format}) - worksheet.write(row, col+6, tag['name']) - worksheet.write(row, col+7, tag['count']) - worksheet.write(row, col+8, tag['ps']) - 
worksheet.write(row, col+9, tag['ng']) + for tag in list(author["tags"].values()): + worksheet.conditional_format( + row, + col + 7, + row, + col + 9, + {"type": "cell", "criteria": "==", "value": 0, "format": unread_format}, + ) + worksheet.write(row, col + 6, tag["name"]) + worksheet.write(row, col + 7, tag["count"]) + worksheet.write(row, col + 8, tag["ps"]) + worksheet.write(row, col + 9, tag["ng"]) row += 1 workbook.close() return title - + def find_stories(self, query, order="newest", offset=0, limit=25): - story_ids = SearchStory.query(feed_ids=[self.pk], query=query, order=order, - offset=offset, limit=limit) - stories_db = MStory.objects( - story_hash__in=story_ids - ).order_by('-story_date' if order == "newest" else 'story_date') + story_ids = SearchStory.query( + feed_ids=[self.pk], query=query, order=order, offset=offset, limit=limit + ) + stories_db = MStory.objects(story_hash__in=story_ids).order_by( + "-story_date" if order == "newest" else "story_date" + ) stories = self.format_stories(stories_db, self.pk) - + return stories - + @classmethod def format_stories(cls, stories_db, feed_id=None, include_permalinks=False): stories = [] @@ -2009,33 +2228,34 @@ def format_stories(cls, stories_db, feed_id=None, include_permalinks=False): for story_db in stories_db: story = cls.format_story(story_db, feed_id, include_permalinks=include_permalinks) stories.append(story) - + return stories - + @classmethod - def format_story(cls, story_db, feed_id=None, text=False, include_permalinks=False, - show_changes=False): + def format_story(cls, story_db, feed_id=None, text=False, include_permalinks=False, show_changes=False): if isinstance(story_db.story_content_z, str): story_db.story_content_z = base64.b64decode(story_db.story_content_z) - - story_content = '' + + story_content = "" latest_story_content = None has_changes = False - if (not show_changes and - hasattr(story_db, 'story_latest_content_z') and - story_db.story_latest_content_z): + if ( + not show_changes + and hasattr(story_db, "story_latest_content_z") + and story_db.story_latest_content_z + ): try: latest_story_content = smart_str(zlib.decompress(story_db.story_latest_content_z)) except DjangoUnicodeDecodeError: latest_story_content = zlib.decompress(story_db.story_latest_content_z) if story_db.story_content_z: story_content = smart_str(zlib.decompress(story_db.story_content_z)) - - if ' 80: - story_title = story_title[:80] + '...' - - story = {} - story['story_hash'] = getattr(story_db, 'story_hash', None) - story['story_tags'] = story_db.story_tags or [] - story['story_date'] = story_db.story_date.replace(tzinfo=None) - story['story_timestamp'] = story_db.story_date.strftime('%s') - story['story_authors'] = story_db.story_author_name or "" - story['story_title'] = story_title + story_title = story_title[:80] + "..." 
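+            # The dict assembled below is the story payload returned to clients;
+            # keys mostly map one-to-one onto MStory fields, with the *_z fields
+            # decompressed above. Illustrative shape (feed id 42 is an assumed example):
+            #   story["story_hash"]      -> "42:8ac51e"
+            #   story["story_timestamp"] -> "1696532692"  (epoch seconds, as a string)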
+ + story = {} + story["story_hash"] = getattr(story_db, "story_hash", None) + story["story_tags"] = story_db.story_tags or [] + story["story_date"] = story_db.story_date.replace(tzinfo=None) + story["story_timestamp"] = story_db.story_date.strftime("%s") + story["story_authors"] = story_db.story_author_name or "" + story["story_title"] = story_title if blank_story_title: - story['story_title_blank'] = True - story['story_content'] = story_content - story['story_permalink'] = story_db.story_permalink - story['image_urls'] = story_db.image_urls - story['secure_image_urls']= cls.secure_image_urls(story_db.image_urls) - story['secure_image_thumbnails']= cls.secure_image_thumbnails(story_db.image_urls) - story['story_feed_id'] = feed_id or story_db.story_feed_id - story['has_modifications']= has_changes - story['comment_count'] = story_db.comment_count if hasattr(story_db, 'comment_count') else 0 - story['comment_user_ids'] = story_db.comment_user_ids if hasattr(story_db, 'comment_user_ids') else [] - story['share_count'] = story_db.share_count if hasattr(story_db, 'share_count') else 0 - story['share_user_ids'] = story_db.share_user_ids if hasattr(story_db, 'share_user_ids') else [] - story['guid_hash'] = story_db.guid_hash if hasattr(story_db, 'guid_hash') else None - if hasattr(story_db, 'source_user_id'): - story['source_user_id'] = story_db.source_user_id - story['id'] = story_db.story_guid or story_db.story_date - if hasattr(story_db, 'starred_date'): - story['starred_date'] = story_db.starred_date - if hasattr(story_db, 'user_tags'): - story['user_tags'] = story_db.user_tags - if hasattr(story_db, 'user_notes'): - story['user_notes'] = story_db.user_notes - if hasattr(story_db, 'highlights'): - story['highlights'] = story_db.highlights - if hasattr(story_db, 'shared_date'): - story['shared_date'] = story_db.shared_date - if hasattr(story_db, 'comments'): - story['comments'] = story_db.comments - if hasattr(story_db, 'user_id'): - story['user_id'] = story_db.user_id - if include_permalinks and hasattr(story_db, 'blurblog_permalink'): - story['blurblog_permalink'] = story_db.blurblog_permalink() + story["story_title_blank"] = True + story["story_content"] = story_content + story["story_permalink"] = story_db.story_permalink + story["image_urls"] = story_db.image_urls + story["secure_image_urls"] = cls.secure_image_urls(story_db.image_urls) + story["secure_image_thumbnails"] = cls.secure_image_thumbnails(story_db.image_urls) + story["story_feed_id"] = feed_id or story_db.story_feed_id + story["has_modifications"] = has_changes + story["comment_count"] = story_db.comment_count if hasattr(story_db, "comment_count") else 0 + story["comment_user_ids"] = story_db.comment_user_ids if hasattr(story_db, "comment_user_ids") else [] + story["share_count"] = story_db.share_count if hasattr(story_db, "share_count") else 0 + story["share_user_ids"] = story_db.share_user_ids if hasattr(story_db, "share_user_ids") else [] + story["guid_hash"] = story_db.guid_hash if hasattr(story_db, "guid_hash") else None + if hasattr(story_db, "source_user_id"): + story["source_user_id"] = story_db.source_user_id + story["id"] = story_db.story_guid or story_db.story_date + if hasattr(story_db, "starred_date"): + story["starred_date"] = story_db.starred_date + if hasattr(story_db, "user_tags"): + story["user_tags"] = story_db.user_tags + if hasattr(story_db, "user_notes"): + story["user_notes"] = story_db.user_notes + if hasattr(story_db, "highlights"): + story["highlights"] = story_db.highlights + if hasattr(story_db, 
"shared_date"): + story["shared_date"] = story_db.shared_date + if hasattr(story_db, "comments"): + story["comments"] = story_db.comments + if hasattr(story_db, "user_id"): + story["user_id"] = story_db.user_id + if include_permalinks and hasattr(story_db, "blurblog_permalink"): + story["blurblog_permalink"] = story_db.blurblog_permalink() if text: - soup = BeautifulSoup(story['story_content'], features="lxml") - text = ''.join(soup.findAll(text=True)) - text = re.sub(r'\n+', '\n\n', text) - text = re.sub(r'\t+', '\t', text) - story['text'] = text - + soup = BeautifulSoup(story["story_content"], features="lxml") + text = "".join(soup.findAll(text=True)) + text = re.sub(r"\n+", "\n\n", text) + text = re.sub(r"\t+", "\t", text) + story["text"] = text + return story - + @classmethod def secure_image_urls(cls, urls): - signed_urls = [create_imageproxy_signed_url(settings.IMAGES_URL, - settings.IMAGES_SECRET_KEY, - url) for url in urls] + signed_urls = [ + create_imageproxy_signed_url(settings.IMAGES_URL, settings.IMAGES_SECRET_KEY, url) for url in urls + ] return dict(zip(urls, signed_urls)) - + @classmethod def secure_image_thumbnails(cls, urls, size=192): - signed_urls = [create_imageproxy_signed_url(settings.IMAGES_URL, - settings.IMAGES_SECRET_KEY, - url, - size) for url in urls] + signed_urls = [ + create_imageproxy_signed_url(settings.IMAGES_URL, settings.IMAGES_SECRET_KEY, url, size) + for url in urls + ] return dict(zip(urls, signed_urls)) - + def get_tags(self, entry): fcat = [] - if 'tags' in entry: + if "tags" in entry: for tcat in entry.tags: term = None - if hasattr(tcat, 'label') and tcat.label: + if hasattr(tcat, "label") and tcat.label: term = tcat.label - elif hasattr(tcat, 'term') and tcat.term: + elif hasattr(tcat, "term") and tcat.term: term = tcat.term if not term or "CDATA" in term: continue qcat = term.strip() - if ',' in qcat or '/' in qcat: - qcat = qcat.replace(',', '/').split('/') + if "," in qcat or "/" in qcat: + qcat = qcat.replace(",", "/").split("/") else: qcat = [qcat] for zcat in qcat: tagname = zcat.lower() - while ' ' in tagname: - tagname = tagname.replace(' ', ' ') + while " " in tagname: + tagname = tagname.replace(" ", " ") tagname = tagname.strip() - if not tagname or tagname == ' ': + if not tagname or tagname == " ": continue fcat.append(tagname) fcat = [strip_tags(t)[:250] for t in fcat[:12]] return fcat - + @classmethod def get_permalink(cls, entry): - link = entry.get('link') + link = entry.get("link") if not link: - links = entry.get('links') + links = entry.get("links") if links: - link = links[0].get('href') + link = links[0].get("href") if not link: - link = entry.get('id') + link = entry.get("id") return link - + def _exists_story(self, story, story_content, existing_stories, new_story_hashes, lightweight=False): story_in_system = None story_has_changed = False story_link = self.get_permalink(story) existing_stories_hashes = list(existing_stories.keys()) - story_pub_date = story.get('published') + story_pub_date = story.get("published") # story_published_now = story.get('published_now', False) # start_date = story_pub_date - datetime.timedelta(hours=8) # end_date = story_pub_date + datetime.timedelta(hours=8) @@ -2166,110 +2386,146 @@ def _exists_story(self, story, story_content, existing_stories, new_story_hashes if isinstance(existing_story.id, str): # Correcting a MongoDB bug existing_story.story_guid = existing_story.id - - if story.get('story_hash') == existing_story.story_hash: + + if story.get("story_hash") == existing_story.story_hash: 
story_in_system = existing_story - elif (story.get('story_hash') in existing_stories_hashes and - story.get('story_hash') != existing_story.story_hash): + elif ( + story.get("story_hash") in existing_stories_hashes + and story.get("story_hash") != existing_story.story_hash + ): # Story already exists but is not this one continue - elif (existing_story.story_hash in new_story_hashes and - story.get('story_hash') != existing_story.story_hash): - # Story coming up later + elif ( + existing_story.story_hash in new_story_hashes + and story.get("story_hash") != existing_story.story_hash + ): + # Story coming up later continue - if 'story_latest_content_z' in existing_story: + if "story_latest_content_z" in existing_story: existing_story_content = smart_str(zlib.decompress(existing_story.story_latest_content_z)) - elif 'story_latest_content' in existing_story: + elif "story_latest_content" in existing_story: existing_story_content = existing_story.story_latest_content - elif 'story_content_z' in existing_story: + elif "story_content_z" in existing_story: existing_story_content = smart_str(zlib.decompress(existing_story.story_content_z)) - elif 'story_content' in existing_story: + elif "story_content" in existing_story: existing_story_content = existing_story.story_content else: - existing_story_content = '' - - + existing_story_content = "" + # Title distance + content distance, checking if story changed - story_title_difference = abs(levenshtein_distance(story.get('title'), - existing_story.story_title)) - - title_ratio = difflib.SequenceMatcher(None, story.get('title', ""), - existing_story.story_title).ratio() - if title_ratio < .75: continue - + story_title_difference = abs(levenshtein_distance(story.get("title"), existing_story.story_title)) + + title_ratio = difflib.SequenceMatcher( + None, story.get("title", ""), existing_story.story_title + ).ratio() + if title_ratio < 0.75: + continue + story_timedelta = existing_story.story_date - story_pub_date # logging.debug('Story pub date: %s %s (%s, %s)' % (existing_story.story_date, story_pub_date, title_ratio, story_timedelta)) - if abs(story_timedelta.days) >= 2: continue - + if abs(story_timedelta.days) >= 2: + continue + seq = difflib.SequenceMatcher(None, story_content, existing_story_content) - + similiar_length_min = 1000 - if (existing_story.story_permalink == story_link and - existing_story.story_title == story.get('title')): + if existing_story.story_permalink == story_link and existing_story.story_title == story.get( + "title" + ): similiar_length_min = 20 - + # Skip content check if already failed due to a timeout. 
This way we catch titles - if lightweight: continue + if lightweight: + continue - if (seq + if ( + seq and story_content and len(story_content) > similiar_length_min and existing_story_content - and seq.real_quick_ratio() > .9 - and seq.quick_ratio() > .95): + and seq.real_quick_ratio() > 0.9 + and seq.quick_ratio() > 0.95 + ): content_ratio = seq.ratio() - if story_title_difference > 0 and content_ratio > .98: + if story_title_difference > 0 and content_ratio > 0.98: story_in_system = existing_story if story_title_difference > 0 or content_ratio < 1.0: if settings.DEBUG: - logging.debug(" ---> Title difference - %s/%s (%s): %s" % (story.get('title'), existing_story.story_title, story_title_difference, content_ratio)) + logging.debug( + " ---> Title difference - %s/%s (%s): %s" + % ( + story.get("title"), + existing_story.story_title, + story_title_difference, + content_ratio, + ) + ) story_has_changed = True break - + # More restrictive content distance, still no story match - if not story_in_system and content_ratio > .98: + if not story_in_system and content_ratio > 0.98: if settings.DEBUG: - logging.debug(" ---> Content difference - %s/%s (%s): %s" % (story.get('title'), existing_story.story_title, story_title_difference, content_ratio)) + logging.debug( + " ---> Content difference - %s/%s (%s): %s" + % ( + story.get("title"), + existing_story.story_title, + story_title_difference, + content_ratio, + ) + ) story_in_system = existing_story story_has_changed = True break - + if story_in_system and not story_has_changed: if story_content != existing_story_content: if settings.DEBUG: - logging.debug(" ---> Content difference - %s (%s)/%s (%s)" % (story.get('title'), len(story_content), existing_story.story_title, len(existing_story_content))) + logging.debug( + " ---> Content difference - %s (%s)/%s (%s)" + % ( + story.get("title"), + len(story_content), + existing_story.story_title, + len(existing_story_content), + ) + ) story_has_changed = True if story_link != existing_story.story_permalink: if settings.DEBUG: - logging.debug(" ---> Permalink difference - %s/%s" % (story_link, existing_story.story_permalink)) + logging.debug( + " ---> Permalink difference - %s/%s" + % (story_link, existing_story.story_permalink) + ) story_has_changed = True # if story_pub_date != existing_story.story_date: # story_has_changed = True break - - + # if story_has_changed or not story_in_system: - # print 'New/updated story: %s' % (story), + # print 'New/updated story: %s' % (story), return story_in_system, story_has_changed - + def get_next_scheduled_update(self, force=False, verbose=True, premium_speed=False, pro_speed=False): if self.min_to_decay and not force and not premium_speed: return self.min_to_decay - + from apps.notifications.models import MUserFeedNotification - + if premium_speed: self.active_premium_subscribers += 1 if pro_speed: self.pro_subscribers += 1 - - spd = self.stories_last_month / 30.0 - subs = (self.active_premium_subscribers + - ((self.active_subscribers - self.active_premium_subscribers) / 10.0)) + + spd = self.stories_last_month / 30.0 + subs = self.active_premium_subscribers + ( + (self.active_subscribers - self.active_premium_subscribers) / 10.0 + ) notification_count = MUserFeedNotification.objects.filter(feed_id=self.pk).count() - # Calculate sub counts: + # Calculate sub counts: # SELECT COUNT(*) FROM feeds WHERE active_premium_subscribers > 10 AND stories_last_month >= 30; # SELECT COUNT(*) FROM feeds WHERE active_premium_subscribers > 1 AND active_premium_subscribers < 10 
AND stories_last_month >= 30; # SELECT COUNT(*) FROM feeds WHERE active_premium_subscribers = 1 AND stories_last_month >= 30; @@ -2295,7 +2551,7 @@ def get_next_scheduled_update(self, force=False, verbose=True, premium_speed=Fal if subs > 1: total = 60 - (spd * 60) else: - total = 60*6 - (spd * 60*6) + total = 60 * 6 - (spd * 60 * 6) elif spd == 0: if subs > 1: total = 60 * 6 @@ -2303,7 +2559,7 @@ def get_next_scheduled_update(self, force=False, verbose=True, premium_speed=Fal total = 60 * 12 else: total = 60 * 24 - months_since_last_story = seconds_timesince(self.last_story_date) / (60*60*24*30) + months_since_last_story = seconds_timesince(self.last_story_date) / (60 * 60 * 24 * 30) total *= max(1, months_since_last_story) # updates_per_day_delay = 3 * 60 / max(.25, ((max(0, self.active_subscribers)**.2) # * (self.stories_last_month**0.25))) @@ -2324,27 +2580,27 @@ def get_next_scheduled_update(self, force=False, verbose=True, premium_speed=Fal if self.is_push: fetch_history = MFetchHistory.feed(self.pk) - if len(fetch_history['push_history']): + if len(fetch_history["push_history"]): total = total * 12 - + # Any notifications mean a 30 min minimum if notification_count > 0: total = min(total, 30) # 4 hour max for premiums, 48 hour max for free if subs >= 1: - total = min(total, 60*4*1) + total = min(total, 60 * 4 * 1) else: - total = min(total, 60*24*2) + total = min(total, 60 * 24 * 2) # Craigslist feeds get 6 hours minimum - if 'craigslist' in self.feed_address: - total = max(total, 60*6) + if "craigslist" in self.feed_address: + total = max(total, 60 * 6) # Twitter feeds get 2 hours minimum - if 'twitter' in self.feed_address: - total = max(total, 60*2) - + if "twitter" in self.feed_address: + total = max(total, 60 * 2) + # Pro subscribers get absolute minimum if self.pro_subscribers and self.pro_subscribers >= 1: if self.stories_last_month == 0: @@ -2353,72 +2609,80 @@ def get_next_scheduled_update(self, force=False, verbose=True, premium_speed=Fal total = min(total, settings.PRO_MINUTES_BETWEEN_FETCHES) if verbose: - logging.debug(" ---> [%-30s] Fetched every %s min - Subs: %s/%s/%s/%s/%s Stories/day: %s" % ( - self.log_title[:30], total, - self.num_subscribers, - self.active_subscribers, - self.active_premium_subscribers, - self.archive_subscribers, - self.pro_subscribers, - spd)) + logging.debug( + " ---> [%-30s] Fetched every %s min - Subs: %s/%s/%s/%s/%s Stories/day: %s" + % ( + self.log_title[:30], + total, + self.num_subscribers, + self.active_subscribers, + self.active_premium_subscribers, + self.archive_subscribers, + self.pro_subscribers, + spd, + ) + ) return total - + def set_next_scheduled_update(self, verbose=False, skip_scheduling=False): r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) total = self.get_next_scheduled_update(force=True, verbose=verbose) error_count = self.error_count - + if error_count: total = total * error_count - total = min(total, 60*24*7) + total = min(total, 60 * 24 * 7) if verbose: - logging.debug(' ---> [%-30s] ~FBScheduling feed fetch geometrically: ' - '~SB%s errors. Time: %s min' % ( - self.log_title[:30], self.errors_since_good, total)) - + logging.debug( + " ---> [%-30s] ~FBScheduling feed fetch geometrically: " + "~SB%s errors. 
Time: %s min" % (self.log_title[:30], self.errors_since_good, total) + ) + random_factor = random.randint(0, int(total)) / 4 - next_scheduled_update = datetime.datetime.utcnow() + datetime.timedelta( - minutes = total + random_factor) + next_scheduled_update = datetime.datetime.utcnow() + datetime.timedelta(minutes=total + random_factor) original_min_to_decay = self.min_to_decay self.min_to_decay = total - + delta = self.next_scheduled_update - datetime.datetime.now() minutes_to_next_fetch = (delta.seconds + (delta.days * 24 * 3600)) / 60 if minutes_to_next_fetch > self.min_to_decay or not skip_scheduling: self.next_scheduled_update = next_scheduled_update if self.active_subscribers >= 1: - r.zadd('scheduled_updates', { self.pk: self.next_scheduled_update.strftime('%s') }) - r.zrem('tasked_feeds', self.pk) - r.srem('queued_feeds', self.pk) - - updated_fields = ['last_update', 'next_scheduled_update'] + r.zadd("scheduled_updates", {self.pk: self.next_scheduled_update.strftime("%s")}) + r.zrem("tasked_feeds", self.pk) + r.srem("queued_feeds", self.pk) + + updated_fields = ["last_update", "next_scheduled_update"] if self.min_to_decay != original_min_to_decay: - updated_fields.append('min_to_decay') + updated_fields.append("min_to_decay") self.save(update_fields=updated_fields) - + @property def error_count(self): r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) - fetch_errors = int(r.zscore('error_feeds', self.pk) or 0) - + fetch_errors = int(r.zscore("error_feeds", self.pk) or 0) + return fetch_errors + self.errors_since_good - + def schedule_feed_fetch_immediately(self, verbose=True): r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) if not self.num_subscribers: - logging.debug(' ---> [%-30s] Not scheduling feed fetch immediately, no subs.' % (self.log_title[:30])) + logging.debug( + " ---> [%-30s] Not scheduling feed fetch immediately, no subs." % (self.log_title[:30]) + ) return self - + if verbose: - logging.debug(' ---> [%-30s] Scheduling feed fetch immediately...' % (self.log_title[:30])) - + logging.debug(" ---> [%-30s] Scheduling feed fetch immediately..." % (self.log_title[:30])) + self.next_scheduled_update = datetime.datetime.utcnow() - r.zadd('scheduled_updates', { self.pk: self.next_scheduled_update.strftime('%s') }) + r.zadd("scheduled_updates", {self.pk: self.next_scheduled_update.strftime("%s")}) return self.save() - + def setup_push(self): from apps.push.models import PushSubscription + try: push = self.push except PushSubscription.DoesNotExist: @@ -2426,35 +2690,38 @@ def setup_push(self): else: self.is_push = push.verified self.save() - + def queue_pushed_feed_xml(self, xml, latest_push_date_delta=None): r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) queue_size = r.llen("push_feeds") - + if latest_push_date_delta: - latest_push_date_delta = "%s" % str(latest_push_date_delta).split('.', 2)[0] + latest_push_date_delta = "%s" % str(latest_push_date_delta).split(".", 2)[0] if queue_size > 1000: self.schedule_feed_fetch_immediately() else: - logging.debug(' ---> [%-30s] [%s] ~FB~SBQueuing pushed stories, last pushed %s...' % (self.log_title[:30], self.pk, latest_push_date_delta)) + logging.debug( + " ---> [%-30s] [%s] ~FB~SBQueuing pushed stories, last pushed %s..." 
+ % (self.log_title[:30], self.pk, latest_push_date_delta) + ) self.set_next_scheduled_update() - PushFeeds.apply_async(args=(self.pk, xml), queue='push_feeds') - + PushFeeds.apply_async(args=(self.pk, xml), queue="push_feeds") + # def calculate_collocations_story_content(self, # collocation_measures=TrigramAssocMeasures, # collocation_finder=TrigramCollocationFinder): # stories = MStory.objects.filter(story_feed_id=self.pk) # story_content = ' '.join([s.story_content for s in stories if s.story_content]) # return self.calculate_collocations(story_content, collocation_measures, collocation_finder) - # + # # def calculate_collocations_story_title(self, # collocation_measures=BigramAssocMeasures, # collocation_finder=BigramCollocationFinder): # stories = MStory.objects.filter(story_feed_id=self.pk) # story_titles = ' '.join([s.story_title for s in stories if s.story_title]) # return self.calculate_collocations(story_titles, collocation_measures, collocation_finder) - # + # # def calculate_collocations(self, content, # collocation_measures=TrigramAssocMeasures, # collocation_finder=TrigramCollocationFinder): @@ -2467,35 +2734,37 @@ def queue_pushed_feed_xml(self, xml, latest_push_date_delta=None): # print "ValueError, ignoring: %s" % e # content = re.sub(r'<[^>]*>', '', content) # content = re.split(r"[^A-Za-z-'&]+", content) - # + # # finder = collocation_finder.from_words(content) # finder.apply_freq_filter(3) # best = finder.nbest(collocation_measures.pmi, 10) # phrases = [' '.join(phrase) for phrase in best] - # + # # return phrases # class FeedCollocations(models.Model): # feed = models.ForeignKey(Feed) # phrase = models.CharField(max_length=500) - + + class FeedData(models.Model): - feed = AutoOneToOneField(Feed, related_name='data', on_delete=models.CASCADE) + feed = AutoOneToOneField(Feed, related_name="data", on_delete=models.CASCADE) feed_tagline = models.CharField(max_length=1024, blank=True, null=True) story_count_history = models.TextField(blank=True, null=True) feed_classifier_counts = models.TextField(blank=True, null=True) popular_tags = models.CharField(max_length=1024, blank=True, null=True) popular_authors = models.CharField(max_length=2048, blank=True, null=True) - + def save(self, *args, **kwargs): if self.feed_tagline and len(self.feed_tagline) >= 1000: self.feed_tagline = self.feed_tagline[:1000] - - try: + + try: super(FeedData, self).save(*args, **kwargs) except (IntegrityError, OperationError): - if hasattr(self, 'id') and self.id: self.delete() + if hasattr(self, "id") and self.id: + self.delete() except DatabaseError as e: # Nothing updated logging.debug(" ---> ~FRNothing updated in FeedData (%s): %s" % (self.feed, e)) @@ -2503,49 +2772,49 @@ def save(self, *args, **kwargs): class MFeedIcon(mongo.Document): - feed_id = mongo.IntField(primary_key=True) - color = mongo.StringField(max_length=6) - data = mongo.StringField() - icon_url = mongo.StringField() - not_found = mongo.BooleanField(default=False) - + feed_id = mongo.IntField(primary_key=True) + color = mongo.StringField(max_length=6) + data = mongo.StringField() + icon_url = mongo.StringField() + not_found = mongo.BooleanField(default=False) + meta = { - 'collection' : 'feed_icons', - 'allow_inheritance' : False, + "collection": "feed_icons", + "allow_inheritance": False, } - + @classmethod def get_feed(cls, feed_id, create=True): try: - feed_icon = cls.objects.read_preference(pymongo.ReadPreference.PRIMARY)\ - .get(feed_id=feed_id) + feed_icon = 
cls.objects.read_preference(pymongo.ReadPreference.PRIMARY).get(feed_id=feed_id) except cls.DoesNotExist: if create: feed_icon = cls.objects.create(feed_id=feed_id) else: feed_icon = None - + return feed_icon - + def save(self, *args, **kwargs): if self.icon_url: self.icon_url = str(self.icon_url) - try: + try: return super(MFeedIcon, self).save(*args, **kwargs) except (IntegrityError, OperationError): # print "Error on Icon: %s" % e - if hasattr(self, '_id'): self.delete() + if hasattr(self, "_id"): + self.delete() class MFeedPage(mongo.Document): feed_id = mongo.IntField(primary_key=True) page_data = mongo.BinaryField() - + meta = { - 'collection': 'feed_pages', - 'allow_inheritance': False, + "collection": "feed_pages", + "allow_inheritance": False, } - + def page(self): try: return zlib.decompress(self.page_data) @@ -2553,8 +2822,8 @@ def page(self): logging.debug(" ***> Zlib decompress error: %s" % e) self.page_data = None self.save() - return - + return + @classmethod def get_data(cls, feed_id): data = None @@ -2568,8 +2837,8 @@ def get_data(cls, feed_id): logging.debug(" ***> Zlib decompress error: %s" % e) self.page_data = None self.save() - return - + return + if not data: dupe_feed = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id) if dupe_feed: @@ -2582,66 +2851,71 @@ def get_data(cls, feed_id): return data + class MStory(mongo.Document): - '''A feed item''' - story_feed_id = mongo.IntField() - story_date = mongo.DateTimeField() - story_title = mongo.StringField(max_length=1024) - story_content = mongo.StringField() - story_content_z = mongo.BinaryField() - story_original_content = mongo.StringField() + """A feed item""" + + story_feed_id = mongo.IntField() + story_date = mongo.DateTimeField() + story_title = mongo.StringField(max_length=1024) + story_content = mongo.StringField() + story_content_z = mongo.BinaryField() + story_original_content = mongo.StringField() story_original_content_z = mongo.BinaryField() - story_latest_content = mongo.StringField() - story_latest_content_z = mongo.BinaryField() - original_text_z = mongo.BinaryField() - original_page_z = mongo.BinaryField() - story_content_type = mongo.StringField(max_length=255) - story_author_name = mongo.StringField() - story_permalink = mongo.StringField() - story_guid = mongo.StringField() - story_hash = mongo.StringField() - image_urls = mongo.ListField(mongo.StringField(max_length=1024)) - story_tags = mongo.ListField(mongo.StringField(max_length=250)) - comment_count = mongo.IntField() - comment_user_ids = mongo.ListField(mongo.IntField()) - share_count = mongo.IntField() - share_user_ids = mongo.ListField(mongo.IntField()) + story_latest_content = mongo.StringField() + story_latest_content_z = mongo.BinaryField() + original_text_z = mongo.BinaryField() + original_page_z = mongo.BinaryField() + story_content_type = mongo.StringField(max_length=255) + story_author_name = mongo.StringField() + story_permalink = mongo.StringField() + story_guid = mongo.StringField() + story_hash = mongo.StringField() + image_urls = mongo.ListField(mongo.StringField(max_length=1024)) + story_tags = mongo.ListField(mongo.StringField(max_length=250)) + comment_count = mongo.IntField() + comment_user_ids = mongo.ListField(mongo.IntField()) + share_count = mongo.IntField() + share_user_ids = mongo.ListField(mongo.IntField()) meta = { - 'collection': 'stories', - 'indexes': [('story_feed_id', '-story_date'), - {'fields': ['story_hash'], - 'unique': True, - }], - 'ordering': ['-story_date'], - 'allow_inheritance': False, - 'cascade': 
False, - 'strict': False, + "collection": "stories", + "indexes": [ + ("story_feed_id", "-story_date"), + { + "fields": ["story_hash"], + "unique": True, + }, + ], + "ordering": ["-story_date"], + "allow_inheritance": False, + "cascade": False, + "strict": False, } - + RE_STORY_HASH = re.compile(r"^(\d{1,10}):(\w{6})$") RE_RS_KEY = re.compile(r"^RS:(\d+):(\d+)$") def __str__(self): content = self.story_content_z if self.story_content_z else "" return f"{self.story_hash}: {self.story_title[:20]} ({len(self.story_content_z) if self.story_content_z else 0} bytes)" - + @property def guid_hash(self): - return hashlib.sha1((self.story_guid).encode(encoding='utf-8')).hexdigest()[:6] + return hashlib.sha1((self.story_guid).encode(encoding="utf-8")).hexdigest()[:6] @classmethod def guid_hash_unsaved(self, guid): - return hashlib.sha1(guid.encode(encoding='utf-8')).hexdigest()[:6] + return hashlib.sha1(guid.encode(encoding="utf-8")).hexdigest()[:6] @property def feed_guid_hash(self): return "%s:%s" % (self.story_feed_id, self.guid_hash) - + @classmethod def feed_guid_hash_unsaved(cls, feed_id, guid): return "%s:%s" % (feed_id, cls.guid_hash_unsaved(guid)) - + @property def decoded_story_title(self): return html.unescape(self.story_title) @@ -2653,17 +2927,16 @@ def story_content_str(self): story_content = smart_str(zlib.decompress(self.story_content_z)) else: story_content = smart_str(story_content) - + return story_content - def save(self, *args, **kwargs): - story_title_max = MStory._fields['story_title'].max_length - story_content_type_max = MStory._fields['story_content_type'].max_length + story_title_max = MStory._fields["story_title"].max_length + story_content_type_max = MStory._fields["story_content_type"].max_length self.story_hash = self.feed_guid_hash - + self.extract_image_urls() - + if self.story_content: self.story_content_z = zlib.compress(smart_bytes(self.story_content)) self.story_content = None @@ -2677,48 +2950,52 @@ def save(self, *args, **kwargs): self.story_title = self.story_title[:story_title_max] if self.story_content_type and len(self.story_content_type) > story_content_type_max: self.story_content_type = self.story_content_type[:story_content_type_max] - + super(MStory, self).save(*args, **kwargs) - + self.sync_redis() - + return self - + def delete(self, *args, **kwargs): self.remove_from_redis() self.remove_from_search_index() - + super(MStory, self).delete(*args, **kwargs) - + def publish_to_subscribers(self): try: r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish("%s:story" % (self.story_feed_id), '%s,%s' % (self.story_hash, self.story_date.strftime('%s'))) + r.publish( + "%s:story" % (self.story_feed_id), "%s,%s" % (self.story_hash, self.story_date.strftime("%s")) + ) except redis.ConnectionError: - logging.debug(" ***> [%-30s] ~BMRedis is unavailable for real-time." % (Feed.get_by_id(self.story_feed_id).title[:30],)) - + logging.debug( + " ***> [%-30s] ~BMRedis is unavailable for real-time." + % (Feed.get_by_id(self.story_feed_id).title[:30],) + ) + @classmethod def purge_feed_stories(cls, feed, cutoff, verbose=True): stories = cls.objects(story_feed_id=feed.pk) logging.debug(" ---> Deleting %s stories from %s" % (stories.count(), feed)) - if stories.count() > cutoff*1.25: + if stories.count() > cutoff * 1.25: logging.debug(" ***> ~FRToo many stories in %s, not purging..." 
% (feed)) return stories.delete() - + @classmethod def index_all_for_search(cls, offset=0): if not offset: SearchStory.create_elasticsearch_mapping(delete=True) - - last_pk = Feed.objects.latest('pk').pk + + last_pk = Feed.objects.latest("pk").pk for f in range(offset, last_pk, 1000): - print(" ---> %s / %s (%.2s%%)" % (f, last_pk, float(f)/last_pk*100)) - feeds = Feed.objects.filter(pk__in=list(range(f, f+1000)), - active=True, - active_subscribers__gte=1)\ - .values_list('pk') - for f, in feeds: + print(" ---> %s / %s (%.2s%%)" % (f, last_pk, float(f) / last_pk * 100)) + feeds = Feed.objects.filter( + pk__in=list(range(f, f + 1000)), active=True, active_subscribers__gte=1 + ).values_list("pk") + for (f,) in feeds: stories = cls.objects.filter(story_feed_id=f) if not len(stories): continue @@ -2730,14 +3007,16 @@ def index_story_for_search(self): story_content = self.story_content or "" if self.story_content_z: story_content = zlib.decompress(self.story_content_z) - SearchStory.index(story_hash=self.story_hash, - story_title=self.story_title, - story_content=prep_for_search(story_content), - story_tags=self.story_tags, - story_author=self.story_author_name, - story_feed_id=self.story_feed_id, - story_date=self.story_date) - + SearchStory.index( + story_hash=self.story_hash, + story_title=self.story_title, + story_content=prep_for_search(story_content), + story_tags=self.story_tags, + story_author=self.story_author_name, + story_feed_id=self.story_feed_id, + story_date=self.story_date, + ) + def remove_from_search_index(self): try: SearchStory.remove(self.story_hash) @@ -2750,50 +3029,50 @@ def trim_feed(cls, cutoff, feed_id=None, feed=None, verbose=True): cutoff = int(cutoff) if not feed_id and not feed: return extra_stories_count - + if not feed_id: feed_id = feed.pk if not feed: feed = feed_id - - stories = cls.objects( - story_feed_id=feed_id - ).only('story_date').order_by('-story_date') - + + stories = cls.objects(story_feed_id=feed_id).only("story_date").order_by("-story_date") + if stories.count() > cutoff: - logging.debug(' ---> [%-30s] ~FMFound %s stories. Trimming to ~SB%s~SN...' % - (str(feed)[:30], stories.count(), cutoff)) + logging.debug( + " ---> [%-30s] ~FMFound %s stories. Trimming to ~SB%s~SN..." + % (str(feed)[:30], stories.count(), cutoff) + ) try: story_trim_date = stories[cutoff].story_date if story_trim_date == stories[0].story_date: # Handle case where every story is the same time story_trim_date = story_trim_date - datetime.timedelta(seconds=1) except IndexError as e: - logging.debug(' ***> [%-30s] ~BRError trimming feed: %s' % (str(feed)[:30], e)) + logging.debug(" ***> [%-30s] ~BRError trimming feed: %s" % (str(feed)[:30], e)) return extra_stories_count - - extra_stories = cls.objects(story_feed_id=feed_id, - story_date__lte=story_trim_date) + + extra_stories = cls.objects(story_feed_id=feed_id, story_date__lte=story_trim_date) extra_stories_count = extra_stories.count() shared_story_count = 0 for story in extra_stories: - if story.share_count: + if story.share_count: shared_story_count += 1 extra_stories_count -= 1 continue story.delete() if verbose: existing_story_count = cls.objects(story_feed_id=feed_id).count() - logging.debug(" ---> Deleted %s stories, %s (%s shared) left." % ( - extra_stories_count, - existing_story_count, - shared_story_count)) + logging.debug( + " ---> Deleted %s stories, %s (%s shared) left." 
+ % (extra_stories_count, existing_story_count, shared_story_count) + ) return extra_stories_count - + @classmethod def find_story(cls, story_feed_id=None, story_id=None, story_hash=None, original_only=False): from apps.social.models import MSharedStory + original_found = False if story_hash: story_id = story_hash @@ -2804,61 +3083,73 @@ def find_story(cls, story_feed_id=None, story_id=None, story_hash=None, original story = cls.objects(id=story_id).limit(1).first() else: story = cls.objects(story_hash=story_hash).limit(1).first() - + if story: original_found = True if not story and not original_only: - story = MSharedStory.objects.filter(story_feed_id=story_feed_id, - story_hash=story_hash).limit(1).first() + story = ( + MSharedStory.objects.filter(story_feed_id=story_feed_id, story_hash=story_hash) + .limit(1) + .first() + ) if not story and not original_only: - story = MStarredStory.objects.filter(story_feed_id=story_feed_id, - story_hash=story_hash).limit(1).first() - + story = ( + MStarredStory.objects.filter(story_feed_id=story_feed_id, story_hash=story_hash) + .limit(1) + .first() + ) + return story, original_found - + @classmethod def find_by_id(cls, story_ids): from apps.social.models import MSharedStory + count = len(story_ids) multiple = isinstance(story_ids, list) or isinstance(story_ids, tuple) - + stories = list(cls.objects(id__in=story_ids)) if len(stories) < count: shared_stories = list(MSharedStory.objects(id__in=story_ids)) stories.extend(shared_stories) - + if not multiple: stories = stories[0] - + return stories - + @classmethod def find_by_story_hashes(cls, story_hashes): from apps.social.models import MSharedStory + count = len(story_hashes) multiple = isinstance(story_hashes, list) or isinstance(story_hashes, tuple) - + stories = list(cls.objects(story_hash__in=story_hashes)) if len(stories) < count: hashes_found = [s.story_hash for s in stories] remaining_hashes = list(set(story_hashes) - set(hashes_found)) - story_feed_ids = [h.split(':')[0] for h in remaining_hashes] - shared_stories = list(MSharedStory.objects(story_feed_id__in=story_feed_ids, - story_hash__in=remaining_hashes)) + story_feed_ids = [h.split(":")[0] for h in remaining_hashes] + shared_stories = list( + MSharedStory.objects(story_feed_id__in=story_feed_ids, story_hash__in=remaining_hashes) + ) stories.extend(shared_stories) - + if not multiple: stories = stories[0] - + return stories - + @classmethod def ensure_story_hash(cls, story_id, story_feed_id): if not cls.RE_STORY_HASH.match(story_id): - story_id = "%s:%s" % (story_feed_id, hashlib.sha1(story_id.encode(encoding='utf-8')).hexdigest()[:6]) - + story_id = "%s:%s" % ( + story_feed_id, + hashlib.sha1(story_id.encode(encoding="utf-8")).hexdigest()[:6], + ) + return story_id - + @classmethod def split_story_hash(cls, story_hash): matches = cls.RE_STORY_HASH.match(story_hash) @@ -2866,7 +3157,7 @@ def split_story_hash(cls, story_hash): groups = matches.groups() return groups[0], groups[1] return None, None - + @classmethod def split_rs_key(cls, rs_key): matches = cls.RE_RS_KEY.match(rs_key) @@ -2874,36 +3165,37 @@ def split_rs_key(cls, rs_key): groups = matches.groups() return groups[0], groups[1] return None, None - + @classmethod def story_hashes(cls, story_ids): story_hashes = [] for story_id in story_ids: story_hash = cls.ensure_story_hash(story_id) - if not story_hash: continue + if not story_hash: + continue story_hashes.append(story_hash) - + return story_hashes - + def sync_redis(self, r=None): if not r: r = 
redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) feed = Feed.get_by_id(self.story_feed_id) if self.id and self.story_date > feed.unread_cutoff: - feed_key = 'F:%s' % self.story_feed_id + feed_key = "F:%s" % self.story_feed_id r.sadd(feed_key, self.story_hash) - r.expire(feed_key, feed.days_of_story_hashes*24*60*60) - - r.zadd('z' + feed_key, { self.story_hash: time.mktime(self.story_date.timetuple()) }) - r.expire('z' + feed_key, feed.days_of_story_hashes*24*60*60) - + r.expire(feed_key, feed.days_of_story_hashes * 24 * 60 * 60) + + r.zadd("z" + feed_key, {self.story_hash: time.mktime(self.story_date.timetuple())}) + r.expire("z" + feed_key, feed.days_of_story_hashes * 24 * 60 * 60) + def remove_from_redis(self, r=None): if not r: r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) if self.id: - r.srem('F:%s' % self.story_feed_id, self.story_hash) - r.zrem('zF:%s' % self.story_feed_id, self.story_hash) + r.srem("F:%s" % self.story_feed_id, self.story_hash) + r.zrem("zF:%s" % self.story_feed_id, self.story_hash) @classmethod def sync_feed_redis(cls, story_feed_id): @@ -2916,30 +3208,34 @@ def sync_feed_redis(cls, story_feed_id): # r.delete('F:%s' % story_feed_id) # r.delete('zF:%s' % story_feed_id) - logging.info(" ---> [%-30s] ~FMSyncing ~SB%s~SN stories to redis" % (feed and feed.log_title[:30] or story_feed_id, stories.count())) + logging.info( + " ---> [%-30s] ~FMSyncing ~SB%s~SN stories to redis" + % (feed and feed.log_title[:30] or story_feed_id, stories.count()) + ) p = r.pipeline() for story in stories: story.sync_redis(r=p) p.execute() - + def count_comments(self): from apps.social.models import MSharedStory + params = { - 'story_guid': self.story_guid, - 'story_feed_id': self.story_feed_id, + "story_guid": self.story_guid, + "story_feed_id": self.story_feed_id, } - comments = MSharedStory.objects.filter(has_comments=True, **params).only('user_id') - shares = MSharedStory.objects.filter(**params).only('user_id') + comments = MSharedStory.objects.filter(has_comments=True, **params).only("user_id") + shares = MSharedStory.objects.filter(**params).only("user_id") self.comment_count = comments.count() - self.comment_user_ids = [c['user_id'] for c in comments] + self.comment_user_ids = [c["user_id"] for c in comments] self.share_count = shares.count() - self.share_user_ids = [s['user_id'] for s in shares] + self.share_user_ids = [s["user_id"] for s in shares] self.save() - + def extract_image_urls(self, force=False, text=False): if self.image_urls and not force and not text: return self.image_urls - + story_content = None if not text: story_content = self.story_content_str @@ -2948,7 +3244,7 @@ def extract_image_urls(self, force=False, text=False): story_content = smart_str(zlib.decompress(self.original_text_z)) if not story_content: return - + try: soup = BeautifulSoup(story_content, features="lxml") except UserWarning as e: @@ -2960,27 +3256,29 @@ def extract_image_urls(self, force=False, text=False): else: return - images = soup.findAll('img') - - # Add youtube thumbnail and insert appropriately before/after images. + images = soup.findAll("img") + + # Add youtube thumbnail and insert appropriately before/after images. # Give the Youtube a bit of an edge. 
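The hunk below only reflows that thumbnail lookup; as a standalone sketch (names and regexes mirror the diff, the sample URL is illustrative):

    import re

    def youtube_thumbnail_url(video_src):
        # Try each embed form the fetcher recognizes; the first match wins.
        for pattern in (
            r".*?youtube.com/embed/([A-Za-z0-9\-_]+)",
            r".*?youtube.com/v/([A-Za-z0-9\-_]+)",
            r".*?ytimg.com/vi/([A-Za-z0-9\-_]+)",
            r".*?youtube.com/watch\?v=([A-Za-z0-9\-_]+)",
        ):
            match = re.search(pattern, video_src)
            if match:
                # /0.jpg is YouTube's full-size default thumbnail for a video ID.
                return f"https://img.youtube.com/vi/{match.groups()[0]}/0.jpg"
        return None

    # youtube_thumbnail_url("https://www.youtube.com/embed/dQw4w9WgXcQ")
    # -> "https://img.youtube.com/vi/dQw4w9WgXcQ/0.jpg"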
- video_thumbnails = soup.findAll('iframe', src=lambda x: x and any(y in x for y in ['youtube.com', 'ytimg.com'])) + video_thumbnails = soup.findAll( + "iframe", src=lambda x: x and any(y in x for y in ["youtube.com", "ytimg.com"]) + ) for video_thumbnail in video_thumbnails: - video_src = video_thumbnail.get('src') - video_id = re.search('.*?youtube.com/embed/([A-Za-z0-9\-_]+)', video_src) + video_src = video_thumbnail.get("src") + video_id = re.search(".*?youtube.com/embed/([A-Za-z0-9\-_]+)", video_src) if not video_id: - video_id = re.search('.*?youtube.com/v/([A-Za-z0-9\-_]+)', video_src) + video_id = re.search(".*?youtube.com/v/([A-Za-z0-9\-_]+)", video_src) if not video_id: - video_id = re.search('.*?ytimg.com/vi/([A-Za-z0-9\-_]+)', video_src) + video_id = re.search(".*?ytimg.com/vi/([A-Za-z0-9\-_]+)", video_src) if not video_id: - video_id = re.search('.*?youtube.com/watch\?v=([A-Za-z0-9\-_]+)', video_src) + video_id = re.search(".*?youtube.com/watch\?v=([A-Za-z0-9\-_]+)", video_src) if not video_id: logging.debug(f" ***> Couldn't find youtube url in {video_thumbnail}: {video_src}") continue video_img_url = f"https://img.youtube.com/vi/{video_id.groups()[0]}/0.jpg" - iframe_index = story_content.index('= 1024: continue - if 'feedburner.com' in image_url: + if "feedburner.com" in image_url: continue image_url = urllib.parse.urljoin(self.story_permalink, image_url) image_urls.append(image_url) - + if not image_urls: if not text: return self.extract_image_urls(force=force, text=True) else: return - + if text: urls = [] for url in image_urls: - if 'http://' in url[1:] or 'https://' in url[1:]: + if "http://" in url[1:] or "https://" in url[1:]: continue urls.append(url) image_urls = urls - + ordered_image_urls = [] for image_url in list(set(image_urls)): - if 'feedburner' in image_url: + if "feedburner" in image_url: ordered_image_urls.append(image_url) else: ordered_image_urls.insert(0, image_url) image_urls = ordered_image_urls - + if len(image_urls): self.image_urls = [u for u in image_urls if u] else: return - + max_length = MStory.image_urls.field.max_length - while len(''.join(self.image_urls)) > max_length: + while len("".join(self.image_urls)) > max_length: if len(self.image_urls) <= 1: - self.image_urls[0] = self.image_urls[0][:max_length-1] + self.image_urls[0] = self.image_urls[0][: max_length - 1] break else: self.image_urls.pop() @@ -3051,23 +3349,24 @@ def extract_image_urls(self, force=False, text=False): def fetch_original_text(self, force=False, request=None, debug=False): original_text_z = self.original_text_z - + if not original_text_z or force: feed = Feed.get_by_id(self.story_feed_id) self.extract_image_urls(force=force, text=False) ti = TextImporter(self, feed=feed, request=request, debug=debug) original_doc = ti.fetch(return_document=True) - original_text = original_doc.get('content') if original_doc else None + original_text = original_doc.get("content") if original_doc else None self.extract_image_urls(force=force, text=True) self.save() else: logging.user(request, "~FYFetching ~FGoriginal~FY story text, ~SBfound.") original_text = zlib.decompress(original_text_z) - + return original_text def fetch_original_page(self, force=False, request=None, debug=False): from apps.rss_feeds.page_importer import PageImporter + if not self.original_page_z or force: feed = Feed.get_by_id(self.story_feed_id) importer = PageImporter(request=request, feed=feed, story=self) @@ -3075,42 +3374,47 @@ def fetch_original_page(self, force=False, request=None, debug=False): else: 
logging.user(request, "~FYFetching ~FGoriginal~FY story page, ~SBfound.") original_page = zlib.decompress(self.original_page_z) - + return original_page class MStarredStory(mongo.DynamicDocument): """Like MStory, but not inherited due to large overhead of _cls and _type in - mongoengine's inheritance model on every single row.""" - user_id = mongo.IntField(unique_with=('story_guid',)) - starred_date = mongo.DateTimeField() - starred_updated = mongo.DateTimeField() - story_feed_id = mongo.IntField() - story_date = mongo.DateTimeField() - story_title = mongo.StringField(max_length=1024) - story_content = mongo.StringField() - story_content_z = mongo.BinaryField() - story_original_content = mongo.StringField() + mongoengine's inheritance model on every single row.""" + + user_id = mongo.IntField(unique_with=("story_guid",)) + starred_date = mongo.DateTimeField() + starred_updated = mongo.DateTimeField() + story_feed_id = mongo.IntField() + story_date = mongo.DateTimeField() + story_title = mongo.StringField(max_length=1024) + story_content = mongo.StringField() + story_content_z = mongo.BinaryField() + story_original_content = mongo.StringField() story_original_content_z = mongo.BinaryField() - original_text_z = mongo.BinaryField() - story_content_type = mongo.StringField(max_length=255) - story_author_name = mongo.StringField() - story_permalink = mongo.StringField() - story_guid = mongo.StringField() - story_hash = mongo.StringField() - story_tags = mongo.ListField(mongo.StringField(max_length=250)) - user_notes = mongo.StringField() - user_tags = mongo.ListField(mongo.StringField(max_length=128)) - highlights = mongo.ListField(mongo.StringField(max_length=16384)) - image_urls = mongo.ListField(mongo.StringField(max_length=1024)) + original_text_z = mongo.BinaryField() + story_content_type = mongo.StringField(max_length=255) + story_author_name = mongo.StringField() + story_permalink = mongo.StringField() + story_guid = mongo.StringField() + story_hash = mongo.StringField() + story_tags = mongo.ListField(mongo.StringField(max_length=250)) + user_notes = mongo.StringField() + user_tags = mongo.ListField(mongo.StringField(max_length=128)) + highlights = mongo.ListField(mongo.StringField(max_length=16384)) + image_urls = mongo.ListField(mongo.StringField(max_length=1024)) meta = { - 'collection': 'starred_stories', - 'indexes': [('user_id', '-starred_date'), ('user_id', 'story_feed_id'), - ('user_id', 'story_hash'), 'story_feed_id'], - 'ordering': ['-starred_date'], - 'allow_inheritance': False, - 'strict': False, + "collection": "starred_stories", + "indexes": [ + ("user_id", "-starred_date"), + ("user_id", "story_feed_id"), + ("user_id", "story_hash"), + "story_feed_id", + ], + "ordering": ["-starred_date"], + "allow_inheritance": False, + "strict": False, } def __unicode__(self): @@ -3118,11 +3422,9 @@ def __unicode__(self): user = User.objects.get(pk=self.user_id) username = user.username except User.DoesNotExist: - username = '[deleted]' - return "%s: %s (%s)" % (username, - self.story_title[:20], - self.story_feed_id) - + username = "[deleted]" + return "%s: %s (%s)" % (username, self.story_title[:20], self.story_feed_id) + def save(self, *args, **kwargs): if self.story_content: self.story_content_z = zlib.compress(smart_bytes(self.story_content)) @@ -3134,100 +3436,106 @@ def save(self, *args, **kwargs): self.starred_updated = datetime.datetime.now() return super(MStarredStory, self).save(*args, **kwargs) - + @classmethod def find_stories(cls, query, user_id, tag=None, offset=0, 
limit=25, order="newest"): stories_db = cls.objects( - Q(user_id=user_id) & - (Q(story_title__icontains=query) | - Q(story_author_name__icontains=query) | - Q(story_tags__icontains=query)) + Q(user_id=user_id) + & ( + Q(story_title__icontains=query) + | Q(story_author_name__icontains=query) + | Q(story_tags__icontains=query) + ) ) if tag: stories_db = stories_db.filter(user_tags__contains=tag) - - stories_db = stories_db.order_by('%sstarred_date' % - ('-' if order == "newest" else ""))[offset:offset+limit] + + stories_db = stories_db.order_by("%sstarred_date" % ("-" if order == "newest" else ""))[ + offset : offset + limit + ] stories = Feed.format_stories(stories_db) - + return stories - + @classmethod def find_stories_by_user_tag(cls, user_tag, user_id, offset=0, limit=25): - stories_db = cls.objects( - Q(user_id=user_id), - Q(user_tags__icontains=user_tag) - ).order_by('-starred_date')[offset:offset+limit] + stories_db = cls.objects(Q(user_id=user_id), Q(user_tags__icontains=user_tag)).order_by( + "-starred_date" + )[offset : offset + limit] stories = Feed.format_stories(stories_db) - + return stories @classmethod def trim_old_stories(cls, stories=10, days=90, dryrun=False): print(" ---> Fetching starred story counts...") - stats = settings.MONGODB.newsblur.starred_stories.aggregate([{ - "$group": { - "_id": "$user_id", - "stories": {"$sum": 1}, - }, - }, { - "$match": { - "stories": {"$gte": stories} - }, - }]) + stats = settings.MONGODB.newsblur.starred_stories.aggregate( + [ + { + "$group": { + "_id": "$user_id", + "stories": {"$sum": 1}, + }, + }, + { + "$match": {"stories": {"$gte": stories}}, + }, + ] + ) month_ago = datetime.datetime.now() - datetime.timedelta(days=days) user_ids = list(stats) - user_ids = sorted(user_ids, key=lambda x:x['stories'], reverse=True) + user_ids = sorted(user_ids, key=lambda x: x["stories"], reverse=True) print(" ---> Found %s users with more than %s starred stories" % (len(user_ids), stories)) total = 0 for stat in user_ids: try: - user = User.objects.select_related('profile').get(pk=stat['_id']) + user = User.objects.select_related("profile").get(pk=stat["_id"]) except User.DoesNotExist: user = None - + if user and (user.profile.is_premium or user.profile.last_seen_on > month_ago): continue - - total += stat['stories'] - username = "%s (%s)" % (user and user.username or " - ", stat['_id']) - print(" ---> %19.19s: %-20.20s %s stories" % (user and user.profile.last_seen_on or "Deleted", - username, - stat['stories'])) - if not dryrun and stat['_id']: - cls.objects.filter(user_id=stat['_id']).delete() - elif not dryrun and stat['_id'] == 0: + + total += stat["stories"] + username = "%s (%s)" % (user and user.username or " - ", stat["_id"]) + print( + " ---> %19.19s: %-20.20s %s stories" + % (user and user.profile.last_seen_on or "Deleted", username, stat["stories"]) + ) + if not dryrun and stat["_id"]: + cls.objects.filter(user_id=stat["_id"]).delete() + elif not dryrun and stat["_id"] == 0: print(" ---> Deleting unstarred stories (user_id = 0)") - cls.objects.filter(user_id=stat['_id']).delete() - - + cls.objects.filter(user_id=stat["_id"]).delete() + print(" ---> Deleted %s stories in total." 
% total) @property def guid_hash(self): - return hashlib.sha1(self.story_guid.encode(encoding='utf-8')).hexdigest()[:6] + return hashlib.sha1(self.story_guid.encode(encoding="utf-8")).hexdigest()[:6] @property def feed_guid_hash(self): return "%s:%s" % (self.story_feed_id or "0", self.guid_hash) - + def fetch_original_text(self, force=False, request=None, debug=False): original_text_z = self.original_text_z feed = Feed.get_by_id(self.story_feed_id) - + if not original_text_z or force: ti = TextImporter(self, feed=feed, request=request, debug=debug) original_text = ti.fetch() else: logging.user(request, "~FYFetching ~FGoriginal~FY story text, ~SBfound.") original_text = zlib.decompress(original_text_z) - + return original_text - + def fetch_original_page(self, force=False, request=None, debug=False): return None - + + class MStarredStoryCounts(mongo.Document): user_id = mongo.IntField() tag = mongo.StringField(max_length=128) @@ -3237,12 +3545,12 @@ class MStarredStoryCounts(mongo.Document): count = mongo.IntField(default=0) meta = { - 'collection': 'starred_stories_counts', - 'indexes': ['user_id'], - 'ordering': ['tag'], - 'allow_inheritance': False, + "collection": "starred_stories_counts", + "indexes": ["user_id"], + "ordering": ["tag"], + "allow_inheritance": False, } - + def __unicode__(self): if self.tag: return "Tag: %s (%s)" % (self.tag, self.count) @@ -3250,69 +3558,74 @@ def __unicode__(self): return "Feed: %s (%s)" % (self.feed_id, self.count) elif self.is_highlights: return "Highlights: %s (%s)" % (self.is_highlights, self.count) - + return "%s/%s/%s" % (self.tag, self.feed_id, self.is_highlights) @property def rss_url(self, secret_token=None): if self.feed_id: return - + if not secret_token: - user = User.objects.select_related('profile').get(pk=self.user_id) + user = User.objects.select_related("profile").get(pk=self.user_id) secret_token = user.profile.secret_token - + slug = self.slug if self.slug else "" if not self.slug and self.tag: slug = slugify(self.tag) self.slug = slug self.save() - return "%s/reader/starred_rss/%s/%s/%s" % (settings.NEWSBLUR_URL, self.user_id, - secret_token, slug) - + return "%s/reader/starred_rss/%s/%s/%s" % (settings.NEWSBLUR_URL, self.user_id, secret_token, slug) + @classmethod def user_counts(cls, user_id, include_total=False, try_counting=True): counts = cls.objects.filter(user_id=user_id) - counts = sorted([{'tag': c.tag, - 'count': c.count, - 'is_highlights': c.is_highlights, - 'feed_address': c.rss_url, - 'active': True, - 'feed_id': c.feed_id} - for c in counts], - key=lambda x: (x.get('tag', '') or '').lower()) - + counts = sorted( + [ + { + "tag": c.tag, + "count": c.count, + "is_highlights": c.is_highlights, + "feed_address": c.rss_url, + "active": True, + "feed_id": c.feed_id, + } + for c in counts + ], + key=lambda x: (x.get("tag", "") or "").lower(), + ) + total = 0 feed_total = 0 for c in counts: - if not c['tag'] and not c['feed_id'] and not c['is_highlights']: - total = c['count'] - if c['feed_id']: - feed_total += c['count'] - + if not c["tag"] and not c["feed_id"] and not c["is_highlights"]: + total = c["count"] + if c["feed_id"]: + feed_total += c["count"] + if try_counting and (total != feed_total or not len(counts)): user = User.objects.get(pk=user_id) - logging.user(user, "~FC~SBCounting~SN saved stories (%s total vs. %s counted)..." % - (total, feed_total)) + logging.user( + user, "~FC~SBCounting~SN saved stories (%s total vs. %s counted)..." 
% (total, feed_total) + ) cls.count_for_user(user_id) - return cls.user_counts(user_id, include_total=include_total, - try_counting=False) - + return cls.user_counts(user_id, include_total=include_total, try_counting=False) + if include_total: return counts, total return counts - + @classmethod def schedule_count_tags_for_user(cls, user_id): ScheduleCountTagsForUser.apply_async(kwargs=dict(user_id=user_id)) - + @classmethod def count_for_user(cls, user_id, total_only=False): user_tags = [] user_feeds = [] highlights = 0 - + if not total_only: cls.objects(user_id=user_id).delete() try: @@ -3323,45 +3636,47 @@ def count_for_user(cls, user_id, total_only=False): logging.debug(" ---> ~FBOperationError on mongo: ~SB%s" % e) total_stories_count = MStarredStory.objects(user_id=user_id).count() - cls.objects(user_id=user_id, tag=None, feed_id=None, is_highlights=None).update_one(set__count=total_stories_count, - upsert=True) + cls.objects(user_id=user_id, tag=None, feed_id=None, is_highlights=None).update_one( + set__count=total_stories_count, upsert=True + ) return dict(total=total_stories_count, tags=user_tags, feeds=user_feeds, highlights=highlights) @classmethod def count_tags_for_user(cls, user_id): - all_tags = MStarredStory.objects(user_id=user_id, - user_tags__exists=True).item_frequencies('user_tags') - user_tags = sorted([(k, v) for k, v in list(all_tags.items()) if int(v) > 0 and k], - key=lambda x: x[0].lower(), - reverse=True) - + all_tags = MStarredStory.objects(user_id=user_id, user_tags__exists=True).item_frequencies( + "user_tags" + ) + user_tags = sorted( + [(k, v) for k, v in list(all_tags.items()) if int(v) > 0 and k], + key=lambda x: x[0].lower(), + reverse=True, + ) + for tag, count in list(dict(user_tags).items()): - cls.objects(user_id=user_id, tag=tag, slug=slugify(tag)).update_one(set__count=count, - upsert=True) - + cls.objects(user_id=user_id, tag=tag, slug=slugify(tag)).update_one(set__count=count, upsert=True) + return user_tags - + @classmethod def count_highlights_for_user(cls, user_id): - highlighted_count = MStarredStory.objects(user_id=user_id, - highlights__exists=True, - __raw__={"$where": "this.highlights.length > 0"}).count() + highlighted_count = MStarredStory.objects( + user_id=user_id, highlights__exists=True, __raw__={"$where": "this.highlights.length > 0"} + ).count() if highlighted_count > 0: - cls.objects(user_id=user_id, - is_highlights=True, - slug="highlights" - ).update_one(set__count=highlighted_count, upsert=True) + cls.objects(user_id=user_id, is_highlights=True, slug="highlights").update_one( + set__count=highlighted_count, upsert=True + ) else: cls.objects(user_id=user_id, is_highlights=True, slug="highlights").delete() - + return highlighted_count - + @classmethod def count_feeds_for_user(cls, user_id): - all_feeds = MStarredStory.objects(user_id=user_id).item_frequencies('story_feed_id') + all_feeds = MStarredStory.objects(user_id=user_id).item_frequencies("story_feed_id") user_feeds = dict([(k, v) for k, v in list(all_feeds.items()) if v]) - + # Clean up None'd and 0'd feed_ids, so they can be counted against the total if user_feeds.get(None, False): user_feeds[0] = user_feeds.get(0, 0) @@ -3370,26 +3685,26 @@ def count_feeds_for_user(cls, user_id): if user_feeds.get(0, False): user_feeds[-1] = user_feeds.get(0, 0) del user_feeds[0] - + too_many_feeds = False if len(user_feeds) < 1000 else True for feed_id, count in list(user_feeds.items()): - if too_many_feeds and count <= 1: continue - cls.objects(user_id=user_id, - feed_id=feed_id, - 
slug="feed:%s" % feed_id).update_one(set__count=count, - upsert=True) - + if too_many_feeds and count <= 1: + continue + cls.objects(user_id=user_id, feed_id=feed_id, slug="feed:%s" % feed_id).update_one( + set__count=count, upsert=True + ) + return user_feeds - + @classmethod def adjust_count(cls, user_id, feed_id=None, tag=None, highlights=None, amount=0): params = dict(user_id=user_id) if feed_id: - params['feed_id'] = feed_id + params["feed_id"] = feed_id if tag: - params['tag'] = tag + params["tag"] = tag if highlights: - params['is_highlights'] = True + params["is_highlights"] = True cls.objects(**params).update_one(inc__count=amount, upsert=True) try: @@ -3399,6 +3714,7 @@ def adjust_count(cls, user_id, feed_id=None, tag=None, highlights=None, amount=0 if story_count and story_count.count <= 0: story_count.delete() + class MSavedSearch(mongo.Document): user_id = mongo.IntField() query = mongo.StringField(max_length=1024) @@ -3406,58 +3722,61 @@ class MSavedSearch(mongo.Document): slug = mongo.StringField(max_length=128) meta = { - 'collection': 'saved_searches', - 'indexes': ['user_id', - {'fields': ['user_id', 'feed_id', 'query'], - 'unique': True, - }], - 'ordering': ['query'], - 'allow_inheritance': False, + "collection": "saved_searches", + "indexes": [ + "user_id", + { + "fields": ["user_id", "feed_id", "query"], + "unique": True, + }, + ], + "ordering": ["query"], + "allow_inheritance": False, } @property def rss_url(self, secret_token=None): if not secret_token: - user = User.objects.select_related('profile').get(pk=self.user_id) + user = User.objects.select_related("profile").get(pk=self.user_id) secret_token = user.profile.secret_token - + slug = self.slug if self.slug else "" - return "%s/reader/saved_search/%s/%s/%s" % (settings.NEWSBLUR_URL, self.user_id, - secret_token, slug) - + return "%s/reader/saved_search/%s/%s/%s" % (settings.NEWSBLUR_URL, self.user_id, secret_token, slug) + @classmethod def user_searches(cls, user_id): searches = cls.objects.filter(user_id=user_id) - searches = sorted([{'query': s.query, - 'feed_address': s.rss_url, - 'feed_id': s.feed_id, - 'active': True, - } for s in searches], - key=lambda x: (x.get('query', '') or '').lower()) + searches = sorted( + [ + { + "query": s.query, + "feed_address": s.rss_url, + "feed_id": s.feed_id, + "active": True, + } + for s in searches + ], + key=lambda x: (x.get("query", "") or "").lower(), + ) return searches - + @classmethod def save_search(cls, user_id, feed_id, query): user = User.objects.get(pk=user_id) - params = dict(user_id=user_id, - feed_id=feed_id, - query=query, - slug=slugify(query)) + params = dict(user_id=user_id, feed_id=feed_id, query=query, slug=slugify(query)) try: saved_search = cls.objects.get(**params) logging.user(user, "~FRSaved search already exists: ~SB%s" % query) except cls.DoesNotExist: logging.user(user, "~FCCreating a saved search: ~SB%s~SN/~SB%s" % (feed_id, query)) saved_search = cls.objects.create(**params) - + return saved_search - + @classmethod def delete_search(cls, user_id, feed_id, query): user = User.objects.get(pk=user_id) - params = dict(user_id=user_id, - feed_id=feed_id, - query=query) + params = dict(user_id=user_id, feed_id=feed_id, query=query) try: saved_search = cls.objects.get(**params) logging.user(user, "~FCDeleting saved search: ~SB%s" % query) @@ -3465,89 +3784,90 @@ def delete_search(cls, user_id, feed_id, query): except cls.DoesNotExist: logging.user(user, "~FRCan't delete saved search, missing: ~SB%s~SN/~SB%s" % (feed_id, query)) except 
cls.MultipleObjectsReturned: - logging.user(user, "~FRFound multiple saved searches, deleting: ~SB%s~SN/~SB%s" % (feed_id, query)) + logging.user( + user, "~FRFound multiple saved searches, deleting: ~SB%s~SN/~SB%s" % (feed_id, query) + ) cls.objects(**params).delete() - - + + class MFetchHistory(mongo.Document): feed_id = mongo.IntField(unique=True) feed_fetch_history = mongo.DynamicField() page_fetch_history = mongo.DynamicField() push_history = mongo.DynamicField() raw_feed_history = mongo.DynamicField() - + meta = { - 'db_alias': 'nbanalytics', - 'collection': 'fetch_history', - 'allow_inheritance': False, + "db_alias": "nbanalytics", + "collection": "fetch_history", + "allow_inheritance": False, } @classmethod def feed(cls, feed_id, timezone=None, fetch_history=None): if not fetch_history: try: - fetch_history = cls.objects.read_preference(pymongo.ReadPreference.PRIMARY)\ - .get(feed_id=feed_id) + fetch_history = cls.objects.read_preference(pymongo.ReadPreference.PRIMARY).get( + feed_id=feed_id + ) except cls.DoesNotExist: fetch_history = cls.objects.create(feed_id=feed_id) history = {} - for fetch_type in ['feed_fetch_history', 'page_fetch_history', 'push_history']: + for fetch_type in ["feed_fetch_history", "page_fetch_history", "push_history"]: history[fetch_type] = getattr(fetch_history, fetch_type) if not history[fetch_type]: history[fetch_type] = [] for f, fetch in enumerate(history[fetch_type]): - date_key = 'push_date' if fetch_type == 'push_history' else 'fetch_date' + date_key = "push_date" if fetch_type == "push_history" else "fetch_date" history[fetch_type][f] = { - date_key: localtime_for_timezone(fetch[0], - timezone).strftime("%Y-%m-%d %H:%M:%S"), - 'status_code': fetch[1], - 'message': fetch[2] + date_key: localtime_for_timezone(fetch[0], timezone).strftime("%Y-%m-%d %H:%M:%S"), + "status_code": fetch[1], + "message": fetch[2], } return history - + @classmethod def add(cls, feed_id, fetch_type, date=None, message=None, code=None, exception=None): if not date: date = datetime.datetime.now() try: - fetch_history = cls.objects.read_preference(pymongo.ReadPreference.PRIMARY)\ - .get(feed_id=feed_id) + fetch_history = cls.objects.read_preference(pymongo.ReadPreference.PRIMARY).get(feed_id=feed_id) except cls.DoesNotExist: fetch_history = cls.objects.create(feed_id=feed_id) - - if fetch_type == 'feed': + + if fetch_type == "feed": history = fetch_history.feed_fetch_history or [] - elif fetch_type == 'page': + elif fetch_type == "page": history = fetch_history.page_fetch_history or [] - elif fetch_type == 'push': + elif fetch_type == "push": history = fetch_history.push_history or [] - elif fetch_type == 'raw_feed': + elif fetch_type == "raw_feed": history = fetch_history.raw_feed_history or [] history = [[date, code, message]] + history any_exceptions = any([c for d, c, m in history if c not in [200, 304]]) if any_exceptions: history = history[:25] - elif fetch_type == 'raw_feed': + elif fetch_type == "raw_feed": history = history[:10] else: history = history[:5] - if fetch_type == 'feed': + if fetch_type == "feed": fetch_history.feed_fetch_history = history - elif fetch_type == 'page': + elif fetch_type == "page": fetch_history.page_fetch_history = history - elif fetch_type == 'push': + elif fetch_type == "push": fetch_history.push_history = history - elif fetch_type == 'raw_feed': + elif fetch_type == "raw_feed": fetch_history.raw_feed_history = history - + fetch_history.save() - - if fetch_type == 'feed': - RStats.add('feed_fetch') - + + if fetch_type == "feed": + 
RStats.add("feed_fetch") + return cls.feed(feed_id, fetch_history=fetch_history) @@ -3555,33 +3875,34 @@ class DuplicateFeed(models.Model): duplicate_address = models.CharField(max_length=764, db_index=True) duplicate_link = models.CharField(max_length=764, null=True, db_index=True) duplicate_feed_id = models.CharField(max_length=255, null=True, db_index=True) - feed = models.ForeignKey(Feed, related_name='duplicate_addresses', on_delete=models.CASCADE) - + feed = models.ForeignKey(Feed, related_name="duplicate_addresses", on_delete=models.CASCADE) + def __str__(self): return "%s: %s / %s" % (self.feed, self.duplicate_address, self.duplicate_link) - + def canonical(self): return { - 'duplicate_address': self.duplicate_address, - 'duplicate_link': self.duplicate_link, - 'duplicate_feed_id': self.duplicate_feed_id, - 'feed_id': self.feed_id + "duplicate_address": self.duplicate_address, + "duplicate_link": self.duplicate_link, + "duplicate_feed_id": self.duplicate_feed_id, + "feed_id": self.feed_id, } - + def save(self, *args, **kwargs): - max_address = DuplicateFeed._meta.get_field('duplicate_address').max_length + max_address = DuplicateFeed._meta.get_field("duplicate_address").max_length if len(self.duplicate_address) > max_address: self.duplicate_address = self.duplicate_address[:max_address] - max_link = DuplicateFeed._meta.get_field('duplicate_link').max_length + max_link = DuplicateFeed._meta.get_field("duplicate_link").max_length if self.duplicate_link and len(self.duplicate_link) > max_link: self.duplicate_link = self.duplicate_link[:max_link] - + super(DuplicateFeed, self).save(*args, **kwargs) + def merge_feeds(original_feed_id, duplicate_feed_id, force=False): from apps.reader.models import UserSubscription from apps.social.models import MSharedStory - + if original_feed_id == duplicate_feed_id: logging.info(" ***> Merging the same feed. 
Ignoring...") return original_feed_id @@ -3591,7 +3912,7 @@ def merge_feeds(original_feed_id, duplicate_feed_id, force=False): except Feed.DoesNotExist: logging.info(" ***> Already deleted feed: %s" % duplicate_feed_id) return original_feed_id - + heavier_dupe = original_feed.num_subscribers < duplicate_feed.num_subscribers branched_original = original_feed.branch_from_feed and not duplicate_feed.branch_from_feed if (heavier_dupe or branched_original) and not force: @@ -3599,74 +3920,86 @@ def merge_feeds(original_feed_id, duplicate_feed_id, force=False): original_feed_id, duplicate_feed_id = duplicate_feed_id, original_feed_id if branched_original: original_feed.feed_address = duplicate_feed.feed_address - - logging.info(" ---> Feed: [%s - %s] %s - %s" % (original_feed_id, duplicate_feed_id, - original_feed, original_feed.feed_link)) - logging.info(" Orig ++> %s: (%s subs) %s / %s %s" % (original_feed.pk, - original_feed.num_subscribers, - original_feed.feed_address, - original_feed.feed_link, - " [B: %s]" % original_feed.branch_from_feed.pk if original_feed.branch_from_feed else "")) - logging.info(" Dupe --> %s: (%s subs) %s / %s %s" % (duplicate_feed.pk, - duplicate_feed.num_subscribers, - duplicate_feed.feed_address, - duplicate_feed.feed_link, - " [B: %s]" % duplicate_feed.branch_from_feed.pk if duplicate_feed.branch_from_feed else "")) + + logging.info( + " ---> Feed: [%s - %s] %s - %s" + % (original_feed_id, duplicate_feed_id, original_feed, original_feed.feed_link) + ) + logging.info( + " Orig ++> %s: (%s subs) %s / %s %s" + % ( + original_feed.pk, + original_feed.num_subscribers, + original_feed.feed_address, + original_feed.feed_link, + " [B: %s]" % original_feed.branch_from_feed.pk if original_feed.branch_from_feed else "", + ) + ) + logging.info( + " Dupe --> %s: (%s subs) %s / %s %s" + % ( + duplicate_feed.pk, + duplicate_feed.num_subscribers, + duplicate_feed.feed_address, + duplicate_feed.feed_link, + " [B: %s]" % duplicate_feed.branch_from_feed.pk if duplicate_feed.branch_from_feed else "", + ) + ) original_feed.branch_from_feed = None - - user_subs = UserSubscription.objects.filter(feed=duplicate_feed).order_by('-pk') + + user_subs = UserSubscription.objects.filter(feed=duplicate_feed).order_by("-pk") for user_sub in user_subs: user_sub.switch_feed(original_feed, duplicate_feed) - def delete_story_feed(model, feed_field='feed_id'): + def delete_story_feed(model, feed_field="feed_id"): duplicate_stories = model.objects(**{feed_field: duplicate_feed.pk}) # if duplicate_stories.count(): # logging.info(" ---> Deleting %s %s" % (duplicate_stories.count(), model)) duplicate_stories.delete() - - delete_story_feed(MStory, 'story_feed_id') - delete_story_feed(MFeedPage, 'feed_id') + + delete_story_feed(MStory, "story_feed_id") + delete_story_feed(MFeedPage, "feed_id") try: DuplicateFeed.objects.create( duplicate_address=duplicate_feed.feed_address, duplicate_link=duplicate_feed.feed_link, duplicate_feed_id=duplicate_feed.pk, - feed=original_feed + feed=original_feed, ) except (IntegrityError, OperationError) as e: logging.info(" ***> Could not save DuplicateFeed: %s" % e) - + # Switch this dupe feed's dupe feeds over to the new original. 
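The hunk below performs that re-attachment; for context, a minimal sketch of the lookup those DuplicateFeed rows serve (the helper name is hypothetical; DuplicateFeed is the model defined just above):

    def resolve_canonical_feed_id(feed_id):
        # After a merge, the retired id survives only as a DuplicateFeed row
        # pointing at the feed that absorbed it.
        dupe = DuplicateFeed.objects.filter(duplicate_feed_id=feed_id).first()
        return dupe.feed_id if dupe else feed_id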
duplicate_feeds_duplicate_feeds = DuplicateFeed.objects.filter(feed=duplicate_feed) for dupe_feed in duplicate_feeds_duplicate_feeds: dupe_feed.feed = original_feed dupe_feed.duplicate_feed_id = duplicate_feed.pk dupe_feed.save() - - logging.debug(' ---> Dupe subscribers (%s): %s, Original subscribers (%s): %s' % - (duplicate_feed.pk, duplicate_feed.num_subscribers, - original_feed.pk, original_feed.num_subscribers)) + + logging.debug( + " ---> Dupe subscribers (%s): %s, Original subscribers (%s): %s" + % (duplicate_feed.pk, duplicate_feed.num_subscribers, original_feed.pk, original_feed.num_subscribers) + ) if duplicate_feed.pk != original_feed.pk: duplicate_feed.delete() else: logging.debug(" ***> Duplicate feed is the same as original feed. Panic!") - logging.debug(' ---> Deleted duplicate feed: %s/%s' % (duplicate_feed, duplicate_feed_id)) + logging.debug(" ---> Deleted duplicate feed: %s/%s" % (duplicate_feed, duplicate_feed_id)) original_feed.branch_from_feed = None original_feed.count_subscribers() original_feed.save() - logging.debug(' ---> Now original subscribers: %s' % - (original_feed.num_subscribers)) - - + logging.debug(" ---> Now original subscribers: %s" % (original_feed.num_subscribers)) + MSharedStory.switch_feed(original_feed_id, duplicate_feed_id) - + return original_feed_id - + + def rewrite_folders(folders, original_feed, duplicate_feed): new_folders = [] - + for k, folder in enumerate(folders): if isinstance(folder, int): if folder == duplicate_feed.pk: diff --git a/apps/rss_feeds/page_importer.py b/apps/rss_feeds/page_importer.py index a3d2f321e4..58b11cda30 100644 --- a/apps/rss_feeds/page_importer.py +++ b/apps/rss_feeds/page_importer.py @@ -26,51 +26,55 @@ # from utils.feed_functions import mail_feed_error_to_admin BROKEN_PAGES = [ - 'tag:', - 'info:', - 'uuid:', - 'urn:', - '[]', + "tag:", + "info:", + "uuid:", + "urn:", + "[]", ] # Also change in reader_utils.js. 
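That applies to the BROKEN_PAGE_URLS list below as well as to BROKEN_PAGES above; both gate page fetching the same way in fetch_page_timeout and _fetch_story, condensed here into one guard (a sketch; the helper name is not in the code):

    def page_is_fetchable(feed_link):
        # Pseudo-scheme prefixes (tag:, urn:, ...) can never resolve to a page;
        # the URL fragments name domains whose pages are known not to render.
        if any(feed_link.startswith(s) for s in BROKEN_PAGES):
            return False
        if any(s in feed_link.lower() for s in BROKEN_PAGE_URLS):
            return False
        return True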
BROKEN_PAGE_URLS = [ - 'nytimes.com', - 'github.com', - 'washingtonpost.com', - 'stackoverflow.com', - 'stackexchange.com', - 'twitter.com', - 'rankexploits', - 'gamespot.com', - 'espn.com', - 'royalroad.com', + "nytimes.com", + "github.com", + "washingtonpost.com", + "stackoverflow.com", + "stackexchange.com", + "twitter.com", + "rankexploits", + "gamespot.com", + "espn.com", + "royalroad.com", ] + class PageImporter(object): - def __init__(self, feed, story=None, request=None): self.feed = feed self.story = story self.request = request - + @property def headers(self): return { - 'User-Agent': 'NewsBlur Page Fetcher - %s subscriber%s - %s %s' % ( + "User-Agent": "NewsBlur Page Fetcher - %s subscriber%s - %s %s" + % ( self.feed.num_subscribers, - 's' if self.feed.num_subscribers != 1 else '', + "s" if self.feed.num_subscribers != 1 else "", self.feed.permalink, self.feed.fake_user_agent, ), } - + def fetch_page(self, urllib_fallback=False, requests_exception=None): try: self.fetch_page_timeout(urllib_fallback=urllib_fallback, requests_exception=requests_exception) except TimeoutError: - logging.user(self.request, ' ***> [%-30s] ~FBPage fetch ~SN~FRfailed~FB due to timeout' % (self.feed.log_title[:30])) - + logging.user( + self.request, + " ***> [%-30s] ~FBPage fetch ~SN~FRfailed~FB due to timeout" % (self.feed.log_title[:30]), + ) + @timelimit(10) def fetch_page_timeout(self, urllib_fallback=False, requests_exception=None): html = None @@ -79,8 +83,8 @@ def fetch_page_timeout(self, urllib_fallback=False, requests_exception=None): self.save_no_page(reason="No feed link") return - if feed_link.startswith('www'): - self.feed.feed_link = 'http://' + feed_link + if feed_link.startswith("www"): + self.feed.feed_link = "http://" + feed_link try: if any(feed_link.startswith(s) for s in BROKEN_PAGES): self.save_no_page(reason="Broken page") @@ -88,36 +92,45 @@ def fetch_page_timeout(self, urllib_fallback=False, requests_exception=None): elif any(s in feed_link.lower() for s in BROKEN_PAGE_URLS): self.save_no_page(reason="Banned") return - elif feed_link.startswith('http'): + elif feed_link.startswith("http"): if urllib_fallback: request = urllib.request.Request(feed_link, headers=self.headers) response = urllib.request.urlopen(request) - time.sleep(0.01) # Grrr, GIL. - data = response.read().decode(response.headers.get_content_charset() or 'utf-8') + time.sleep(0.01) # Grrr, GIL. 
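The decode that follows falls back to UTF-8 whenever the server's Content-Type header declares no charset; the same pattern in isolation (a sketch, the URL is illustrative):

    import urllib.request

    response = urllib.request.urlopen("https://example.com/feed")
    # get_content_charset() returns None for a bare "text/html" header.
    charset = response.headers.get_content_charset() or "utf-8"
    data = response.read().decode(charset)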
+ data = response.read().decode(response.headers.get_content_charset() or "utf-8") else: try: response = requests.get(feed_link, headers=self.headers, timeout=10) response.connection.close() except requests.exceptions.TooManyRedirects: response = requests.get(feed_link, timeout=10) - except (AttributeError, SocketError, OpenSSLError, PyAsn1Error, TypeError, - requests.adapters.ReadTimeout) as e: - logging.debug(' ***> [%-30s] Page fetch failed using requests: %s' % (self.feed.log_title[:30], e)) + except ( + AttributeError, + SocketError, + OpenSSLError, + PyAsn1Error, + TypeError, + requests.adapters.ReadTimeout, + ) as e: + logging.debug( + " ***> [%-30s] Page fetch failed using requests: %s" + % (self.feed.log_title[:30], e) + ) self.save_no_page(reason="Page fetch failed") return data = response.text - if response.encoding and response.encoding.lower() != 'utf-8': + if response.encoding and response.encoding.lower() != "utf-8": logging.debug(f" -> ~FBEncoding is {response.encoding}, re-encoding...") try: - data = data.encode('utf-8').decode('utf-8') + data = data.encode("utf-8").decode("utf-8") except (LookupError, UnicodeEncodeError): logging.debug(f" -> ~FRRe-encoding failed!") pass else: try: - data = open(feed_link, 'r').read() + data = open(feed_link, "r").read() except IOError: - self.feed.feed_link = 'http://' + feed_link + self.feed.feed_link = "http://" + feed_link self.fetch_page(urllib_fallback=True) return if data: @@ -130,40 +143,45 @@ def fetch_page_timeout(self, urllib_fallback=False, requests_exception=None): else: self.save_no_page(reason="No data found") return - except (ValueError, urllib.error.URLError, http.client.BadStatusLine, http.client.InvalidURL, - requests.exceptions.ConnectionError) as e: - logging.debug(' ***> [%-30s] Page fetch failed: %s' % (self.feed.log_title[:30], e)) + except ( + ValueError, + urllib.error.URLError, + http.client.BadStatusLine, + http.client.InvalidURL, + requests.exceptions.ConnectionError, + ) as e: + logging.debug(" ***> [%-30s] Page fetch failed: %s" % (self.feed.log_title[:30], e)) self.feed.save_page_history(401, "Bad URL", e) try: fp = feedparser.parse(self.feed.feed_address) except (urllib.error.HTTPError, urllib.error.URLError) as e: return html - feed_link = fp.feed.get('link', "") + feed_link = fp.feed.get("link", "") self.feed.save() - except (http.client.IncompleteRead) as e: - logging.debug(' ***> [%-30s] Page fetch failed: %s' % (self.feed.log_title[:30], e)) + except http.client.IncompleteRead as e: + logging.debug(" ***> [%-30s] Page fetch failed: %s" % (self.feed.log_title[:30], e)) self.feed.save_page_history(500, "IncompleteRead", e) - except (requests.exceptions.RequestException, - requests.packages.urllib3.exceptions.HTTPError) as e: - logging.debug(' ***> [%-30s] Page fetch failed using requests: %s' % (self.feed.log_title[:30], e)) + except (requests.exceptions.RequestException, requests.packages.urllib3.exceptions.HTTPError) as e: + logging.debug( + " ***> [%-30s] Page fetch failed using requests: %s" % (self.feed.log_title[:30], e) + ) # mail_feed_error_to_admin(self.feed, e, local_vars=locals()) return self.fetch_page(urllib_fallback=True, requests_exception=e) except Exception as e: - logging.debug('[%d] ! -------------------------' % (self.feed.id,)) + logging.debug("[%d] ! -------------------------" % (self.feed.id,)) tb = traceback.format_exc() logging.debug(tb) - logging.debug('[%d] ! -------------------------' % (self.feed.id,)) + logging.debug("[%d] ! 
-------------------------" % (self.feed.id,)) self.feed.save_page_history(500, "Error", tb) # mail_feed_error_to_admin(self.feed, e, local_vars=locals()) - if (not settings.DEBUG and hasattr(settings, 'SENTRY_DSN') and - settings.SENTRY_DSN): + if not settings.DEBUG and hasattr(settings, "SENTRY_DSN") and settings.SENTRY_DSN: capture_exception(e) flush() if not urllib_fallback: self.fetch_page(urllib_fallback=True) else: self.feed.save_page_history(200, "OK") - + return html def fetch_story(self): @@ -174,62 +192,75 @@ def fetch_story(self): logging.user(self.request, "~SN~FRFailed~FY to fetch ~FGoriginal story~FY: timed out") except requests.exceptions.TooManyRedirects: logging.user(self.request, "~SN~FRFailed~FY to fetch ~FGoriginal story~FY: too many redirects") - + return html @timelimit(10) def _fetch_story(self): html = None story_permalink = self.story.story_permalink - + if not self.feed: return if any(story_permalink.startswith(s) for s in BROKEN_PAGES): return if any(s in story_permalink.lower() for s in BROKEN_PAGE_URLS): return - if not story_permalink.startswith('http'): + if not story_permalink.startswith("http"): return try: response = requests.get(story_permalink, headers=self.headers, timeout=10) response.connection.close() - except (AttributeError, SocketError, OpenSSLError, PyAsn1Error, - requests.exceptions.ConnectionError, - requests.exceptions.TooManyRedirects, - requests.adapters.ReadTimeout) as e: + except ( + AttributeError, + SocketError, + OpenSSLError, + PyAsn1Error, + requests.exceptions.ConnectionError, + requests.exceptions.TooManyRedirects, + requests.adapters.ReadTimeout, + ) as e: try: response = requests.get(story_permalink, timeout=10) - except (AttributeError, SocketError, OpenSSLError, PyAsn1Error, - requests.exceptions.ConnectionError, - requests.exceptions.TooManyRedirects, - requests.adapters.ReadTimeout) as e: - logging.debug(' ***> [%-30s] Original story fetch failed using requests: %s' % (self.feed.log_title[:30], e)) + except ( + AttributeError, + SocketError, + OpenSSLError, + PyAsn1Error, + requests.exceptions.ConnectionError, + requests.exceptions.TooManyRedirects, + requests.adapters.ReadTimeout, + ) as e: + logging.debug( + " ***> [%-30s] Original story fetch failed using requests: %s" + % (self.feed.log_title[:30], e) + ) return # try: data = response.text # except (LookupError, TypeError): # data = response.content - # import pdb; pdb.set_trace() + # import pdb; pdb.set_trace() - if response.encoding and response.encoding.lower() != 'utf-8': + if response.encoding and response.encoding.lower() != "utf-8": logging.debug(f" -> ~FBEncoding is {response.encoding}, re-encoding...") try: - data = data.encode('utf-8').decode('utf-8') + data = data.encode("utf-8").decode("utf-8") except (LookupError, UnicodeEncodeError): logging.debug(f" -> ~FRRe-encoding failed!") pass if data: - data = data.replace("\xc2\xa0", " ") # Non-breaking space, is mangled when encoding is not utf-8 - data = data.replace("\\u00a0", " ") # Non-breaking space, is mangled when encoding is not utf-8 + data = data.replace("\xc2\xa0", " ") # Non-breaking space, is mangled when encoding is not utf-8 + data = data.replace("\\u00a0", " ") # Non-breaking space, is mangled when encoding is not utf-8 html = self.rewrite_page(data) if not html: return self.save_story(html) - + return html - + def save_story(self, html): self.story.original_page_z = zlib.compress(smart_bytes(html)) try: @@ -237,77 +268,83 @@ def save_story(self, html): except NotUniqueError: pass - def 
save_no_page(self, reason=None):
-        logging.debug(' ---> [%-30s] ~FYNo original page: %s / %s' % (self.feed.log_title[:30], reason, self.feed.feed_link))
+        logging.debug(
+            " ---> [%-30s] ~FYNo original page: %s / %s"
+            % (self.feed.log_title[:30], reason, self.feed.feed_link)
+        )
         self.feed.has_page = False
         self.feed.save()
         self.feed.save_page_history(404, f"Feed has no original page: {reason}")
 
     def rewrite_page(self, response):
-        BASE_RE = re.compile(r'<head(.*?)>', re.I)
+        BASE_RE = re.compile(r"<head(.*?)>", re.I)
         base_code = '<base href="%s" />' % (self.feed.feed_link,)
-        html = BASE_RE.sub('<head\\1> '+base_code, response)
-        
-        if '<base href' not in html:
-            html = "%s %s" % (base_code, html)
+        html = BASE_RE.sub("<head\\1> " + base_code, response)
+
+        if "<base href" not in html:
+            html = "%s %s" % (base_code, html)
 
         return html
 
     def fix_urls(self, document):
         # BeautifulSoup doesn't support <script> tags. You know, like
         # Google Analytics. Ugh.
-        
+
         FIND_RE = re.compile(r'\b(href|src)\s*=\s*("[^"]*"|\'[^\']*\'|[^"\'<>=\s]+)')
         ret = []
         last_end = 0
-        
+
         for match in FIND_RE.finditer(document):
             url = match.group(2)
             if url[0] in "\"'":
                 url = url.strip(url[0])
             parsed = urllib.parse.urlparse(url)
-            if parsed.scheme == parsed.netloc == '': #relative to domain
+            if parsed.scheme == parsed.netloc == "":  # relative to domain
                 url = urllib.parse.urljoin(self.feed.feed_link, url)
-            ret.append(document[last_end:match.start(2)])
+            ret.append(document[last_end : match.start(2)])
            ret.append('"%s"' % (url,))
             last_end = match.end(2)
         ret.append(document[last_end:])
-        
-        return ''.join(ret)
-    
+
+        return "".join(ret)
+
     def save_page(self, html):
         saved = False
-        
+
         if not html or len(html) < 100:
             return
-        
-        if settings.BACKED_BY_AWS.get('pages_on_node'):
+
+        if settings.BACKED_BY_AWS.get("pages_on_node"):
             saved = self.save_page_node(html)
-            if saved and self.feed.s3_page and settings.BACKED_BY_AWS.get('pages_on_s3'):
+            if saved and self.feed.s3_page and settings.BACKED_BY_AWS.get("pages_on_s3"):
                 self.delete_page_s3()
-        
-        if settings.BACKED_BY_AWS.get('pages_on_s3') and not saved:
+
+        if settings.BACKED_BY_AWS.get("pages_on_s3") and not saved:
             saved = self.save_page_s3(html)
-        
+
         if not saved:
             try:
                 feed_page = MFeedPage.objects.get(feed_id=self.feed.pk)
                 # feed_page.page_data = html.encode('utf-8')
                 if feed_page.page() == html:
-                    logging.debug(' ---> [%-30s] ~FYNo change in page data: %s' % (self.feed.log_title[:30], self.feed.feed_link))
+                    logging.debug(
+                        " ---> [%-30s] ~FYNo change in page data: %s"
+                        % (self.feed.log_title[:30], self.feed.feed_link)
+                    )
                 else:
                     # logging.debug(' ---> [%-30s] ~FYChange in page data: %s (%s/%s %s/%s)' % (self.feed.log_title[:30], self.feed.feed_link, type(html), type(feed_page.page()), len(html), len(feed_page.page())))
                     feed_page.page_data = zlib.compress(smart_bytes(html))
                     feed_page.save()
             except MFeedPage.DoesNotExist:
-                feed_page = MFeedPage.objects.create(feed_id=self.feed.pk,
-                                                     page_data=zlib.compress(smart_bytes(html)))
+                feed_page = MFeedPage.objects.create(
+                    feed_id=self.feed.pk, page_data=zlib.compress(smart_bytes(html))
+                )
             return feed_page
-    
+
     def save_page_node(self, html):
         domain = "node-page.service.consul:8008"
         if settings.DOCKERBUILD:
@@ -317,42 +354,47 @@ def save_page_node(self, html):
             self.feed.pk,
         )
         compressed_html = zlib.compress(smart_bytes(html))
-        response = requests.post(url, files={
-            'original_page': compressed_html,
-            # 'original_page': html,
-        })
+        response = requests.post(
+            url,
+            files={
+                "original_page": compressed_html,
+                # 'original_page': html,
+            },
+        )
         if response.status_code == 200:
             return True
         else:
-            logging.debug(' ---> [%-30s] ~FRFailed to save page to node: %s (%s bytes)' % (self.feed.log_title[:30], response.status_code, len(compressed_html)))
+            logging.debug(
+                " ---> [%-30s] ~FRFailed to save page to node: %s (%s bytes)"
+                % (self.feed.log_title[:30], response.status_code, len(compressed_html))
+            )
 
     def save_page_s3(self, html):
-        s3_object = settings.S3_CONN.Object(settings.S3_PAGES_BUCKET_NAME,
-                                            self.feed.s3_pages_key)
-        s3_object.put(Body=compress_string_with_gzip(html.encode('utf-8')),
-                      ContentType='text/html',
-                      ContentEncoding='gzip',
-                      Expires=expires,
-                      ACL='public-read'
-                      )
-        
+        s3_object = settings.S3_CONN.Object(settings.S3_PAGES_BUCKET_NAME, self.feed.s3_pages_key)
+        s3_object.put(
+            Body=compress_string_with_gzip(html.encode("utf-8")),
+            ContentType="text/html",
+            ContentEncoding="gzip",
+            Expires=expires,
+            ACL="public-read",
+        )
+
         try:
             feed_page = MFeedPage.objects.get(feed_id=self.feed.pk)
             feed_page.delete()
-            logging.debug(' ---> [%-30s] ~FYTransfering page data to S3...' % (self.feed.log_title[:30]))
+            logging.debug(" ---> [%-30s] ~FYTransfering page data to S3..." % (self.feed.log_title[:30]))
         except MFeedPage.DoesNotExist:
             pass
-        
+
         if not self.feed.s3_page:
             self.feed.s3_page = True
             self.feed.save()
-        
+
         return True
-    
+
     def delete_page_s3(self):
         k = settings.S3_CONN.Bucket(settings.S3_PAGES_BUCKET_NAME).Object(key=self.feed.s3_pages_key)
         k.delete()
-        
+
         self.feed.s3_page = False
         self.feed.save()
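The fix_urls() helper above resolves scheme-less, domain-relative href/src values against the feed's link while leaving absolute URLs alone. A minimal standalone sketch of the same regex-plus-urljoin technique, runnable outside the class (the function name and example URLs are illustrative, not part of this patch):

import re
import urllib.parse

# Same attribute pattern as FIND_RE above: quoted or bare href/src values.
FIND_RE = re.compile(r'\b(href|src)\s*=\s*("[^"]*"|\'[^\']*\'|[^"\'<>=\s]+)')


def absolutize_urls(document, base_url):
    ret = []
    last_end = 0
    for match in FIND_RE.finditer(document):
        url = match.group(2)
        if url[0] in "\"'":
            url = url.strip(url[0])  # drop surrounding quotes
        parsed = urllib.parse.urlparse(url)
        if parsed.scheme == parsed.netloc == "":  # relative to the domain
            url = urllib.parse.urljoin(base_url, url)
        ret.append(document[last_end : match.start(2)])  # untouched text
        ret.append('"%s"' % (url,))  # re-emit the value, always quoted
        last_end = match.end(2)
    ret.append(document[last_end:])
    return "".join(ret)


# absolutize_urls('<img src="img/logo.png">', "http://example.com/blog/")
# returns '<img src="http://example.com/blog/img/logo.png">'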
diff --git a/apps/rss_feeds/tasks.py b/apps/rss_feeds/tasks.py
index 1ad6003584..2340e55b5e 100644
--- a/apps/rss_feeds/tasks.py
+++ b/apps/rss_feeds/tasks.py
@@ -14,204 +14,227 @@
 FEED_TASKING_MAX = 10000
 
-@app.task(name='task-feeds')
+
+@app.task(name="task-feeds")
 def TaskFeeds():
-    from apps.rss_feeds.models import Feed
+    from apps.rss_feeds.models import Feed
+
     settings.LOG_TO_STREAM = True
     now = datetime.datetime.utcnow()
     start = time.time()
     r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
-    tasked_feeds_size = r.zcard('tasked_feeds')
-    
+    tasked_feeds_size = r.zcard("tasked_feeds")
+
     hour_ago = now - datetime.timedelta(hours=1)
-    r.zremrangebyscore('fetched_feeds_last_hour', 0, int(hour_ago.strftime('%s')))
-    
+    r.zremrangebyscore("fetched_feeds_last_hour", 0, int(hour_ago.strftime("%s")))
+
     now_timestamp = int(now.strftime("%s"))
-    queued_feeds = r.zrangebyscore('scheduled_updates', 0, now_timestamp)
-    r.zremrangebyscore('scheduled_updates', 0, now_timestamp)
+    queued_feeds = r.zrangebyscore("scheduled_updates", 0, now_timestamp)
+    r.zremrangebyscore("scheduled_updates", 0, now_timestamp)
     if not queued_feeds:
         logging.debug(" ---> ~SN~FB~BMNo feeds to queue! Exiting...")
         return
-    
-    r.sadd('queued_feeds', *queued_feeds)
-    logging.debug(" ---> ~SN~FBQueuing ~SB%s~SN stale feeds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)" % (
-        len(queued_feeds),
-        r.zcard('tasked_feeds'),
-        r.scard('queued_feeds'),
-        r.zcard('scheduled_updates')))
-    
+
+    r.sadd("queued_feeds", *queued_feeds)
+    logging.debug(
+        " ---> ~SN~FBQueuing ~SB%s~SN stale feeds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)"
+        % (len(queued_feeds), r.zcard("tasked_feeds"), r.scard("queued_feeds"), r.zcard("scheduled_updates"))
+    )
+
     # Regular feeds
     if tasked_feeds_size < FEED_TASKING_MAX:
-        feeds = r.srandmember('queued_feeds', FEED_TASKING_MAX)
+        feeds = r.srandmember("queued_feeds", FEED_TASKING_MAX)
         Feed.task_feeds(feeds, verbose=True)
         active_count = len(feeds)
     else:
         logging.debug(" ---> ~SN~FBToo many tasked feeds. ~SB%s~SN tasked." % tasked_feeds_size)
         active_count = 0
         feeds = []
-    
-    logging.debug(" ---> ~SN~FBTasking %s feeds took ~SB%s~SN seconds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)" % (
-        active_count,
-        int((time.time() - start)),
-        r.zcard('tasked_feeds'),
-        r.scard('queued_feeds'),
-        r.zcard('scheduled_updates')))
+
+    logging.debug(
+        " ---> ~SN~FBTasking %s feeds took ~SB%s~SN seconds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)"
+        % (
+            active_count,
+            int((time.time() - start)),
+            r.zcard("tasked_feeds"),
+            r.scard("queued_feeds"),
+            r.zcard("scheduled_updates"),
+        )
+    )
     logging.debug(" ---> ~FBFeeds being tasked: ~SB%s" % feeds)
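# A side note on the queueing scheme above, written as a runnable sketch:
# TaskFeeds drains the scheduled_updates sorted set (scored by the timestamp
# a feed comes due) into the queued_feeds set, then tasks a bounded random
# sample. The key names mirror the task; everything else here is
# illustrative, not part of this patch.
import time

import redis

r = redis.Redis()


def schedule_feed(feed_id, due_timestamp):
    # Score each feed by when it next becomes due.
    r.zadd("scheduled_updates", {feed_id: due_timestamp})


def drain_due_feeds(batch_size=10):
    # Everything scored at or before "now" is due: move it to the work set,
    # then sample a bounded batch, as TaskFeeds does with FEED_TASKING_MAX.
    now = int(time.time())
    due = r.zrangebyscore("scheduled_updates", 0, now)
    if due:
        r.zremrangebyscore("scheduled_updates", 0, now)
        r.sadd("queued_feeds", *due)
    return r.srandmember("queued_feeds", batch_size)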
 
-@app.task(name='task-broken-feeds')
+
+@app.task(name="task-broken-feeds")
 def TaskBrokenFeeds():
-    from apps.rss_feeds.models import Feed
+    from apps.rss_feeds.models import Feed
+
     settings.LOG_TO_STREAM = True
     now = datetime.datetime.utcnow()
     start = time.time()
     r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL)
-    
+
     logging.debug(" ---> ~SN~FBQueuing broken feeds...")
-    
+
     # Force refresh feeds
-    refresh_feeds = Feed.objects.filter(
-        active=True,
-        fetched_once=False,
-        active_subscribers__gte=1
-    ).order_by('?')[:100]
+    refresh_feeds = Feed.objects.filter(active=True, fetched_once=False, active_subscribers__gte=1).order_by(
+        "?"
+    )[:100]
     refresh_count = refresh_feeds.count()
     cp1 = time.time()
-    
+
     logging.debug(" ---> ~SN~FBFound %s active, unfetched broken feeds" % refresh_count)
 
     # Mistakenly inactive feeds
-    hours_ago = (now - datetime.timedelta(minutes=10)).strftime('%s')
-    old_tasked_feeds = r.zrangebyscore('tasked_feeds', 0, hours_ago)
+    hours_ago = (now - datetime.timedelta(minutes=10)).strftime("%s")
+    old_tasked_feeds = r.zrangebyscore("tasked_feeds", 0, hours_ago)
     inactive_count = len(old_tasked_feeds)
     if inactive_count:
-        r.zremrangebyscore('tasked_feeds', 0, hours_ago)
+        r.zremrangebyscore("tasked_feeds", 0, hours_ago)
         # r.sadd('queued_feeds', *old_tasked_feeds)
         for feed_id in old_tasked_feeds:
-            r.zincrby('error_feeds', 1, feed_id)
+            r.zincrby("error_feeds", 1, feed_id)
             feed = Feed.get_by_id(feed_id)
             feed.set_next_scheduled_update()
-        logging.debug(" ---> ~SN~FBRe-queuing ~SB%s~SN dropped/broken feeds (~SB%s/%s~SN queued/tasked)" % (
-            inactive_count,
-            r.scard('queued_feeds'),
-            r.zcard('tasked_feeds')))
+        logging.debug(
+            " ---> ~SN~FBRe-queuing ~SB%s~SN dropped/broken feeds (~SB%s/%s~SN queued/tasked)"
+            % (inactive_count, r.scard("queued_feeds"), r.zcard("tasked_feeds"))
+        )
     cp2 = time.time()
-    
+
     old = now - datetime.timedelta(days=1)
-    old_feeds = Feed.objects.filter(
-        next_scheduled_update__lte=old,
-        active_subscribers__gte=1
-    ).order_by('?')[:500]
+    old_feeds = Feed.objects.filter(next_scheduled_update__lte=old, active_subscribers__gte=1).order_by("?")[
+        :500
+    ]
     old_count = old_feeds.count()
     cp3 = time.time()
-    
-    logging.debug(" ---> ~SN~FBTasking ~SBrefresh:~FC%s~FB inactive:~FC%s~FB old:~FC%s~SN~FB broken feeds... (%.4s/%.4s/%.4s)" % (
-        refresh_count,
-        inactive_count,
-        old_count,
-        cp1 - start,
-        cp2 - cp1,
-        cp3 - cp2,
-    ))
-    
+
+    logging.debug(
+        " ---> ~SN~FBTasking ~SBrefresh:~FC%s~FB inactive:~FC%s~FB old:~FC%s~SN~FB broken feeds... 
(%.4s/%.4s/%.4s)" + % ( + refresh_count, + inactive_count, + old_count, + cp1 - start, + cp2 - cp1, + cp3 - cp2, + ) + ) + Feed.task_feeds(refresh_feeds, verbose=False) Feed.task_feeds(old_feeds, verbose=False) - - logging.debug(" ---> ~SN~FBTasking broken feeds took ~SB%s~SN seconds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)" % ( - int((time.time() - start)), - r.zcard('tasked_feeds'), - r.scard('queued_feeds'), - r.zcard('scheduled_updates'))) - -@app.task(name='update-feeds', time_limit=10*60, soft_time_limit=9*60, ignore_result=True) + + logging.debug( + " ---> ~SN~FBTasking broken feeds took ~SB%s~SN seconds (~SB%s~SN/~FG%s~FB~SN/%s tasked/queued/scheduled)" + % ( + int((time.time() - start)), + r.zcard("tasked_feeds"), + r.scard("queued_feeds"), + r.zcard("scheduled_updates"), + ) + ) + + +@app.task(name="update-feeds", time_limit=10 * 60, soft_time_limit=9 * 60, ignore_result=True) def UpdateFeeds(feed_pks): from apps.rss_feeds.models import Feed from apps.statistics.models import MStatistics + r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) - mongodb_replication_lag = int(MStatistics.get('mongodb_replication_lag', 0)) + mongodb_replication_lag = int(MStatistics.get("mongodb_replication_lag", 0)) compute_scores = bool(mongodb_replication_lag < 10) - + profiler = DBProfilerMiddleware() profiler_activated = profiler.process_celery() if profiler_activated: settings.MONGO_COMMAND_LOGGER.process_celery(profiler) redis_middleware = RedisDumpMiddleware() redis_middleware.process_celery(profiler) - + options = { - 'quick': float(MStatistics.get('quick_fetch', 0)), - 'updates_off': MStatistics.get('updates_off', False), - 'compute_scores': compute_scores, - 'mongodb_replication_lag': mongodb_replication_lag, + "quick": float(MStatistics.get("quick_fetch", 0)), + "updates_off": MStatistics.get("updates_off", False), + "compute_scores": compute_scores, + "mongodb_replication_lag": mongodb_replication_lag, } - + if not isinstance(feed_pks, list): feed_pks = [feed_pks] - + for feed_pk in feed_pks: feed = Feed.get_by_id(feed_pk) if not feed or feed.pk != int(feed_pk): - logging.info(" ---> ~FRRemoving feed_id %s from tasked_feeds queue, points to %s..." % (feed_pk, feed and feed.pk)) - r.zrem('tasked_feeds', feed_pk) + logging.info( + " ---> ~FRRemoving feed_id %s from tasked_feeds queue, points to %s..." + % (feed_pk, feed and feed.pk) + ) + r.zrem("tasked_feeds", feed_pk) if not feed: continue try: feed.update(**options) except SoftTimeLimitExceeded as e: - feed.save_feed_history(505, 'Timeout', e) + feed.save_feed_history(505, "Timeout", e) logging.info(" ---> [%-30s] ~BR~FWTime limit hit!~SB~FR Moving on to next feed..." 
% feed) - if profiler_activated: profiler.process_celery_finished() + if profiler_activated: + profiler.process_celery_finished() + -@app.task(name='new-feeds', time_limit=10*60, soft_time_limit=9*60, ignore_result=True) +@app.task(name="new-feeds", time_limit=10 * 60, soft_time_limit=9 * 60, ignore_result=True) def NewFeeds(feed_pks): from apps.rss_feeds.models import Feed + if not isinstance(feed_pks, list): feed_pks = [feed_pks] - + options = {} for feed_pk in feed_pks: feed = Feed.get_by_id(feed_pk) - if not feed: continue + if not feed: + continue feed.update(options=options) -@app.task(name='push-feeds', ignore_result=True) + +@app.task(name="push-feeds", ignore_result=True) def PushFeeds(feed_id, xml): from apps.rss_feeds.models import Feed from apps.statistics.models import MStatistics - - mongodb_replication_lag = int(MStatistics.get('mongodb_replication_lag', 0)) + + mongodb_replication_lag = int(MStatistics.get("mongodb_replication_lag", 0)) compute_scores = bool(mongodb_replication_lag < 60) - + options = { - 'feed_xml': xml, - 'compute_scores': compute_scores, - 'mongodb_replication_lag': mongodb_replication_lag, + "feed_xml": xml, + "compute_scores": compute_scores, + "mongodb_replication_lag": mongodb_replication_lag, } feed = Feed.get_by_id(feed_id) if feed: feed.update(options=options) + @app.task() def ScheduleImmediateFetches(feed_ids, user_id=None): from apps.rss_feeds.models import Feed - + if not isinstance(feed_ids, list): feed_ids = [feed_ids] - + Feed.schedule_feed_fetches_immediately(feed_ids, user_id=user_id) @app.task() def SchedulePremiumSetup(feed_ids): from apps.rss_feeds.models import Feed - + if not isinstance(feed_ids, list): feed_ids = [feed_ids] - + Feed.setup_feeds_for_premium_subscribers(feed_ids) - + + @app.task() def ScheduleCountTagsForUser(user_id): from apps.rss_feeds.models import MStarredStoryCounts - + MStarredStoryCounts.count_for_user(user_id) diff --git a/apps/rss_feeds/test_rss_feeds.py b/apps/rss_feeds/test_rss_feeds.py index 8e3ca41e22..6ad0dc764c 100644 --- a/apps/rss_feeds/test_rss_feeds.py +++ b/apps/rss_feeds/test_rss_feeds.py @@ -10,31 +10,34 @@ class Test_Feed(TestCase): - - fixtures = ['initial_data.json'] + fixtures = ["initial_data.json"] def setUp(self): disconnect() - settings.MONGODB = connect('test_newsblur') - settings.REDIS_STORY_HASH_POOL = redis.ConnectionPool(host=settings.REDIS_STORY['host'], port=6379, db=10) - settings.REDIS_FEED_READ_POOL = redis.ConnectionPool(host=settings.REDIS_SESSIONS['host'], port=6379, db=10) + settings.MONGODB = connect("test_newsblur") + settings.REDIS_STORY_HASH_POOL = redis.ConnectionPool( + host=settings.REDIS_STORY["host"], port=6379, db=10 + ) + settings.REDIS_FEED_READ_POOL = redis.ConnectionPool( + host=settings.REDIS_SESSIONS["host"], port=6379, db=10 + ) r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) - r.delete('RS:1') - r.delete('lRS:1') - r.delete('RS:1:766') - r.delete('zF:766') - r.delete('F:766') - + r.delete("RS:1") + r.delete("lRS:1") + r.delete("RS:1:766") + r.delete("zF:766") + r.delete("F:766") + self.client = Client() def tearDown(self): - settings.MONGODB.drop_database('test_newsblur') + settings.MONGODB.drop_database("test_newsblur") def test_load_feeds__gawker(self): - self.client.login(username='conesus', password='test') + self.client.login(username="conesus", password="test") - management.call_command('loaddata', 'gawker1.json', verbosity=0, skip_checks=False) + management.call_command("loaddata", "gawker1.json", verbosity=0, skip_checks=False) feed 
= Feed.objects.get(pk=10) stories = MStory.objects(story_feed_id=feed.pk) @@ -45,7 +48,7 @@ def test_load_feeds__gawker(self): stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 38) - management.call_command('loaddata', 'gawker2.json', verbosity=0, skip_checks=False) + management.call_command("loaddata", "gawker2.json", verbosity=0, skip_checks=False) feed.update(force=True) @@ -53,16 +56,16 @@ def test_load_feeds__gawker(self): stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 38) - url = reverse('load-single-feed', kwargs=dict(feed_id=10)) + url = reverse("load-single-feed", kwargs=dict(feed_id=10)) response = self.client.get(url) feed = json.decode(response.content) - self.assertEqual(len(feed['stories']), 6) + self.assertEqual(len(feed["stories"]), 6) def test_load_feeds__gothamist(self): - self.client.login(username='conesus', password='test') + self.client.login(username="conesus", password="test") - management.call_command('loaddata', 'gothamist_aug_2009_1.json', verbosity=0, skip_checks=False) - feed = Feed.objects.get(feed_link__contains='gothamist') + management.call_command("loaddata", "gothamist_aug_2009_1.json", verbosity=0, skip_checks=False) + feed = Feed.objects.get(feed_link__contains="gothamist") stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 0) @@ -71,177 +74,179 @@ def test_load_feeds__gothamist(self): stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 42) - url = reverse('load-single-feed', kwargs=dict(feed_id=4)) + url = reverse("load-single-feed", kwargs=dict(feed_id=4)) response = self.client.get(url) content = json.decode(response.content) - self.assertEqual(len(content['stories']), 6) + self.assertEqual(len(content["stories"]), 6) - management.call_command('loaddata', 'gothamist_aug_2009_2.json', verbosity=0, skip_checks=False) + management.call_command("loaddata", "gothamist_aug_2009_2.json", verbosity=0, skip_checks=False) feed.update(force=True) stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 42) - url = reverse('load-single-feed', kwargs=dict(feed_id=4)) + url = reverse("load-single-feed", kwargs=dict(feed_id=4)) response = self.client.get(url) # print [c['story_title'] for c in json.decode(response.content)] content = json.decode(response.content) # Test: 1 changed char in title - self.assertEqual(len(content['stories']), 6) + self.assertEqual(len(content["stories"]), 6) def test_load_feeds__slashdot(self): - self.client.login(username='conesus', password='test') + self.client.login(username="conesus", password="test") old_story_guid = "tag:google.com,2005:reader/item/4528442633bc7b2b" - management.call_command('loaddata', 'slashdot1.json', verbosity=0, skip_checks=False) + management.call_command("loaddata", "slashdot1.json", verbosity=0, skip_checks=False) - feed = Feed.objects.get(feed_link__contains='slashdot') + feed = Feed.objects.get(feed_link__contains="slashdot") stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 0) - management.call_command('refresh_feed', force=1, feed=5, daemonize=False, skip_checks=False) + management.call_command("refresh_feed", force=1, feed=5, daemonize=False, skip_checks=False) stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 38) - response = self.client.get(reverse('load-feeds')) + response = self.client.get(reverse("load-feeds")) content = json.decode(response.content) - 
self.assertEqual(content['feeds']['5']['nt'], 38) + self.assertEqual(content["feeds"]["5"]["nt"], 38) - self.client.post(reverse('mark-story-as-read'), {'story_id': old_story_guid, 'feed_id': 5}) + self.client.post(reverse("mark-story-as-read"), {"story_id": old_story_guid, "feed_id": 5}) - response = self.client.get(reverse('refresh-feeds')) + response = self.client.get(reverse("refresh-feeds")) content = json.decode(response.content) - self.assertEqual(content['feeds']['5']['nt'], 37) + self.assertEqual(content["feeds"]["5"]["nt"], 37) - management.call_command('loaddata', 'slashdot2.json', verbosity=0, skip_checks=False) - management.call_command('refresh_feed', force=1, feed=5, daemonize=False, skip_checks=False) + management.call_command("loaddata", "slashdot2.json", verbosity=0, skip_checks=False) + management.call_command("refresh_feed", force=1, feed=5, daemonize=False, skip_checks=False) stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 38) - url = reverse('load-single-feed', kwargs=dict(feed_id=5)) + url = reverse("load-single-feed", kwargs=dict(feed_id=5)) response = self.client.get(url) # pprint([c['story_title'] for c in json.decode(response.content)]) feed = json.decode(response.content) # Test: 1 changed char in title - self.assertEqual(len(feed['stories']), 6) + self.assertEqual(len(feed["stories"]), 6) - response = self.client.get(reverse('refresh-feeds')) + response = self.client.get(reverse("refresh-feeds")) content = json.decode(response.content) - self.assertEqual(content['feeds']['5']['nt'], 37) + self.assertEqual(content["feeds"]["5"]["nt"], 37) def test_load_feeds__motherjones(self): - self.client.login(username='conesus', password='test') + self.client.login(username="conesus", password="test") - management.call_command('loaddata', 'motherjones1.json', verbosity=0, skip_checks=False) + management.call_command("loaddata", "motherjones1.json", verbosity=0, skip_checks=False) - feed = Feed.objects.get(feed_link__contains='motherjones') + feed = Feed.objects.get(feed_link__contains="motherjones") stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 0) - management.call_command('refresh_feed', force=1, feed=feed.pk, daemonize=False, skip_checks=False) + management.call_command("refresh_feed", force=1, feed=feed.pk, daemonize=False, skip_checks=False) stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 10) - response = self.client.get(reverse('load-feeds')) + response = self.client.get(reverse("load-feeds")) content = json.decode(response.content) - self.assertEqual(content['feeds'][str(feed.pk)]['nt'], 10) + self.assertEqual(content["feeds"][str(feed.pk)]["nt"], 10) - self.client.post(reverse('mark-story-as-read'), {'story_id': stories[0].story_guid, 'feed_id': feed.pk}) + self.client.post( + reverse("mark-story-as-read"), {"story_id": stories[0].story_guid, "feed_id": feed.pk} + ) - response = self.client.get(reverse('refresh-feeds')) + response = self.client.get(reverse("refresh-feeds")) content = json.decode(response.content) - self.assertEqual(content['feeds'][str(feed.pk)]['nt'], 9) + self.assertEqual(content["feeds"][str(feed.pk)]["nt"], 9) - management.call_command('loaddata', 'motherjones2.json', verbosity=0, skip_checks=False) - management.call_command('refresh_feed', force=1, feed=feed.pk, daemonize=False, skip_checks=False) + management.call_command("loaddata", "motherjones2.json", verbosity=0, skip_checks=False) + management.call_command("refresh_feed", force=1, 
feed=feed.pk, daemonize=False, skip_checks=False) stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 10) - url = reverse('load-single-feed', kwargs=dict(feed_id=feed.pk)) + url = reverse("load-single-feed", kwargs=dict(feed_id=feed.pk)) response = self.client.get(url) # pprint([c['story_title'] for c in json.decode(response.content)]) feed = json.decode(response.content) # Test: 1 changed char in title - self.assertEqual(len(feed['stories']), 6) + self.assertEqual(len(feed["stories"]), 6) - response = self.client.get(reverse('refresh-feeds')) + response = self.client.get(reverse("refresh-feeds")) content = json.decode(response.content) - self.assertEqual(content['feeds'][str(feed['feed_id'])]['nt'], 9) + self.assertEqual(content["feeds"][str(feed["feed_id"])]["nt"], 9) def test_load_feeds__google(self): # Freezegun the date to 2017-04-30 - - self.client.login(username='conesus', password='test') + + self.client.login(username="conesus", password="test") old_story_guid = "blog.google:443/topics/inside-google/google-earths-incredible-3d-imagery-explained/" - management.call_command('loaddata', 'google1.json', verbosity=1, skip_checks=False) + management.call_command("loaddata", "google1.json", verbosity=1, skip_checks=False) print((Feed.objects.all())) feed = Feed.objects.get(pk=766) print((" Testing test_load_feeds__google: %s" % feed)) stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 0) - management.call_command('refresh_feed', force=False, feed=766, daemonize=False, skip_checks=False) + management.call_command("refresh_feed", force=False, feed=766, daemonize=False, skip_checks=False) stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 20) - response = self.client.get(reverse('load-feeds')+"?update_counts=true") + response = self.client.get(reverse("load-feeds") + "?update_counts=true") content = json.decode(response.content) - self.assertEqual(content['feeds']['766']['nt'], 20) + self.assertEqual(content["feeds"]["766"]["nt"], 20) old_story = MStory.objects.get(story_feed_id=feed.pk, story_guid__contains=old_story_guid) - self.client.post(reverse('mark-story-hashes-as-read'), {'story_hash': old_story.story_hash}) + self.client.post(reverse("mark-story-hashes-as-read"), {"story_hash": old_story.story_hash}) - response = self.client.get(reverse('refresh-feeds')) + response = self.client.get(reverse("refresh-feeds")) content = json.decode(response.content) - self.assertEqual(content['feeds']['766']['nt'], 19) + self.assertEqual(content["feeds"]["766"]["nt"], 19) - management.call_command('loaddata', 'google2.json', verbosity=1, skip_checks=False) - management.call_command('refresh_feed', force=False, feed=766, daemonize=False, skip_checks=False) + management.call_command("loaddata", "google2.json", verbosity=1, skip_checks=False) + management.call_command("refresh_feed", force=False, feed=766, daemonize=False, skip_checks=False) stories = MStory.objects(story_feed_id=feed.pk) self.assertEqual(stories.count(), 20) - url = reverse('load-single-feed', kwargs=dict(feed_id=766)) + url = reverse("load-single-feed", kwargs=dict(feed_id=766)) response = self.client.get(url) # pprint([c['story_title'] for c in json.decode(response.content)]) feed = json.decode(response.content) # Test: 1 changed char in title - self.assertEqual(len(feed['stories']), 6) + self.assertEqual(len(feed["stories"]), 6) - response = self.client.get(reverse('refresh-feeds')) + response = self.client.get(reverse("refresh-feeds")) 
content = json.decode(response.content) - self.assertEqual(content['feeds']['766']['nt'], 19) - + self.assertEqual(content["feeds"]["766"]["nt"], 19) + def test_load_feeds__brokelyn__invalid_xml(self): BROKELYN_FEED_ID = 16 - self.client.login(username='conesus', password='test') - management.call_command('loaddata', 'brokelyn.json', verbosity=0) + self.client.login(username="conesus", password="test") + management.call_command("loaddata", "brokelyn.json", verbosity=0) self.assertEquals(Feed.objects.get(pk=BROKELYN_FEED_ID).pk, BROKELYN_FEED_ID) - management.call_command('refresh_feed', force=1, feed=BROKELYN_FEED_ID, daemonize=False) + management.call_command("refresh_feed", force=1, feed=BROKELYN_FEED_ID, daemonize=False) - management.call_command('loaddata', 'brokelyn.json', verbosity=0, skip_checks=False) - management.call_command('refresh_feed', force=1, feed=16, daemonize=False, skip_checks=False) + management.call_command("loaddata", "brokelyn.json", verbosity=0, skip_checks=False) + management.call_command("refresh_feed", force=1, feed=16, daemonize=False, skip_checks=False) - url = reverse('load-single-feed', kwargs=dict(feed_id=BROKELYN_FEED_ID)) + url = reverse("load-single-feed", kwargs=dict(feed_id=BROKELYN_FEED_ID)) response = self.client.get(url) # pprint([c['story_title'] for c in json.decode(response.content)]) feed = json.decode(response.content) # Test: 1 changed char in title - self.assertEqual(len(feed['stories']), 6) + self.assertEqual(len(feed["stories"]), 6) def test_all_feeds(self): pass diff --git a/apps/rss_feeds/text_importer.py b/apps/rss_feeds/text_importer.py index d89d8af05e..1f27714285 100644 --- a/apps/rss_feeds/text_importer.py +++ b/apps/rss_feeds/text_importer.py @@ -18,15 +18,14 @@ from django.contrib.sites.models import Site from bs4 import BeautifulSoup from urllib.parse import urljoin - + BROKEN_URLS = [ "gamespot.com", - 'thedailyskip.com', + "thedailyskip.com", ] class TextImporter: - def __init__(self, story=None, feed=None, story_url=None, request=None, debug=False): self.story = story self.story_url = story_url @@ -38,31 +37,36 @@ def __init__(self, story=None, feed=None, story_url=None, request=None, debug=Fa @property def headers(self): - num_subscribers = getattr(self.feed, 'num_subscribers', 0) + num_subscribers = getattr(self.feed, "num_subscribers", 0) return { - 'User-Agent': 'NewsBlur Content Fetcher - %s subscriber%s - %s %s' % ( - num_subscribers, - 's' if num_subscribers != 1 else '', - getattr(self.feed, 'permalink', ''), - getattr(self.feed, 'fake_user_agent', ''), - ), + "User-Agent": "NewsBlur Content Fetcher - %s subscriber%s - %s %s" + % ( + num_subscribers, + "s" if num_subscribers != 1 else "", + getattr(self.feed, "permalink", ""), + getattr(self.feed, "fake_user_agent", ""), + ), } def fetch(self, skip_save=False, return_document=False, use_mercury=True): if self.story_url and any(broken_url in self.story_url for broken_url in BROKEN_URLS): logging.user(self.request, "~SN~FRFailed~FY to fetch ~FGoriginal text~FY: banned") return - + if use_mercury: results = self.fetch_mercury(skip_save=skip_save, return_document=return_document) - + if not use_mercury or not results: - logging.user(self.request, "~SN~FRFailed~FY to fetch ~FGoriginal text~FY with Mercury, trying readability...", warn_color=False) + logging.user( + self.request, + "~SN~FRFailed~FY to fetch ~FGoriginal text~FY with Mercury, trying readability...", + warn_color=False, + ) results = self.fetch_manually(skip_save=skip_save, return_document=return_document) - + 
return results - + def fetch_mercury(self, skip_save=False, return_document=False): try: resp = self.fetch_request(use_mercury=True) @@ -72,29 +76,35 @@ def fetch_mercury(self, skip_save=False, return_document=False): except requests.exceptions.TooManyRedirects: logging.user(self.request, "~SN~FRFailed~FY to fetch ~FGoriginal text~FY: too many redirects") resp = None - + if not resp: return - + try: doc = resp.json() except JSONDecodeError: doc = None - if not doc or doc.get('error', False): - logging.user(self.request, "~SN~FRFailed~FY to fetch ~FGoriginal text~FY: %s" % (doc and doc.get('messages', None) or "[unknown mercury error]")) + if not doc or doc.get("error", False): + logging.user( + self.request, + "~SN~FRFailed~FY to fetch ~FGoriginal text~FY: %s" + % (doc and doc.get("messages", None) or "[unknown mercury error]"), + ) return - - text = doc['content'] - title = doc['title'] - url = doc['url'] - image = doc['lead_image_url'] - - if image and ('http://' in image[1:] or 'https://' in image[1:]): + + text = doc["content"] + title = doc["title"] + url = doc["url"] + image = doc["lead_image_url"] + + if image and ("http://" in image[1:] or "https://" in image[1:]): logging.user(self.request, "~SN~FRRemoving broken image from text: %s" % image) image = None - - return self.process_content(text, title, url, image, skip_save=skip_save, return_document=return_document) - + + return self.process_content( + text, title, url, image, skip_save=skip_save, return_document=return_document + ) + def fetch_manually(self, skip_save=False, return_document=False): try: resp = self.fetch_request(use_mercury=False) @@ -115,15 +125,16 @@ def extract_text(resp): except (LookupError, TypeError): text = resp.content return text + try: text = extract_text(resp) except TimeoutError: logging.user(self.request, "~SN~FRFailed~FY to fetch ~FGoriginal text~FY: timed out on resp.text") return - + # if self.debug: # logging.user(self.request, "~FBOriginal text's website: %s" % text) - + # if resp.encoding and resp.encoding != 'utf-8': # try: # text = text.encode(resp.encoding) @@ -131,11 +142,12 @@ def extract_text(resp): # pass if text: - text = text.replace("\xc2\xa0", " ") # Non-breaking space, is mangled when encoding is not utf-8 - text = text.replace("\\u00a0", " ") # Non-breaking space, is mangled when encoding is not utf-8 + text = text.replace("\xc2\xa0", " ") # Non-breaking space, is mangled when encoding is not utf-8 + text = text.replace("\\u00a0", " ") # Non-breaking space, is mangled when encoding is not utf-8 - original_text_doc = readability.Document(text, url=resp.url, - positive_keywords="post, entry, postProp, article, postContent, postField") + original_text_doc = readability.Document( + text, url=resp.url, positive_keywords="post, entry, postProp, article, postContent, postField" + ) try: content = original_text_doc.summary(html_partial=True) except (ParserError, Unparseable) as e: @@ -148,18 +160,29 @@ def extract_text(resp): title = "" url = resp.url - - return self.process_content(content, title, url, image=None, skip_save=skip_save, return_document=return_document, - original_text_doc=original_text_doc) - - def process_content(self, content, title, url, image, skip_save=False, return_document=False, original_text_doc=None): - original_story_content = self.story and self.story.story_content_z and zlib.decompress(self.story.story_content_z) + + return self.process_content( + content, + title, + url, + image=None, + skip_save=skip_save, + return_document=return_document, + 
original_text_doc=original_text_doc,
+        )
+
+    def process_content(
+        self, content, title, url, image, skip_save=False, return_document=False, original_text_doc=None
+    ):
+        original_story_content = (
+            self.story and self.story.story_content_z and zlib.decompress(self.story.story_content_z)
+        )
         if not original_story_content:
             original_story_content = ""
         story_image_urls = self.story and self.story.image_urls
         if not story_image_urls:
             story_image_urls = []
-        
+
         content = self.add_hero_image(content, story_image_urls)
         if content:
             content = self.rewrite_content(content)
@@ -169,25 +192,36 @@ def process_content(self, content, title, url, image, skip_save=False, return_do
                 full_content_is_longer = True
             elif len(content) > len(original_story_content):
                 full_content_is_longer = True
-        
+
         if content and full_content_is_longer:
             if self.story and not skip_save:
                 self.story.original_text_z = zlib.compress(smart_bytes(content))
                 try:
                     self.story.save()
                 except NotUniqueError as e:
-                    logging.user(self.request, ("~SN~FYFetched ~FGoriginal text~FY: %s" % (e)), warn_color=False)
+                    logging.user(
+                        self.request, ("~SN~FYFetched ~FGoriginal text~FY: %s" % (e)), warn_color=False
+                    )
                     pass
-            logging.user(self.request, ("~SN~FYFetched ~FGoriginal text~FY: now ~SB%s bytes~SN vs. was ~SB%s bytes" % (
-                len(content),
-                len(original_story_content)
-            )), warn_color=False)
+            logging.user(
+                self.request,
+                (
+                    "~SN~FYFetched ~FGoriginal text~FY: now ~SB%s bytes~SN vs. was ~SB%s bytes"
+                    % (len(content), len(original_story_content))
+                ),
+                warn_color=False,
+            )
         else:
-            logging.user(self.request, ("~SN~FRFailed~FY to fetch ~FGoriginal text~FY: was ~SB%s bytes" % (
-                len(original_story_content)
-            )), warn_color=False)
+            logging.user(
+                self.request,
+                (
+                    "~SN~FRFailed~FY to fetch ~FGoriginal text~FY: was ~SB%s bytes"
+                    % (len(original_story_content))
+                ),
+                warn_color=False,
+            )
             return
-        
+
         if return_document:
             return dict(content=content, title=title, url=url, doc=original_text_doc, image=image)
 
@@ -195,21 +229,22 @@ def process_content(self, content, title, url, image, skip_save=False, return_do
     def add_hero_image(self, content, image_urls):
         # Need to have images in the original story to add to the text that may not have any images
-        if not len(image_urls): 
+        if not len(image_urls):
             return content
-        
+
         content_soup = BeautifulSoup(content, features="lxml")
-        content_imgs = content_soup.findAll('img')
+        content_imgs = content_soup.findAll("img")
 
         for img in content_imgs:
             # Since NewsBlur proxies all http images over https, the url can change, so acknowledge urls
             # that are https on the original text but http on the feed
-            if not img.get('src'): continue
-            if img.get('src') in image_urls:
-                image_urls.remove(img.get('src'))
-            elif img.get('src').replace('https:', 'http:') in image_urls:
-                image_urls.remove(img.get('src').replace('https:', 'http:'))
-        
+            if not img.get("src"):
+                continue
+            if img.get("src") in image_urls:
+                image_urls.remove(img.get("src"))
+            elif img.get("src").replace("https:", "http:") in image_urls:
+                image_urls.remove(img.get("src").replace("https:", "http:"))
+
         if len(image_urls):
             image_content = f'<img src="{image_urls[0]}">'
             content = f"{image_content}\n {content}"
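# Aside on the loop above, as a self-contained sketch: add_hero_image() only
# prepends a lead image when the story listed image URLs that the fetched
# text lacks, and it treats http/https variants as the same image because
# NewsBlur proxies http images over https. The function name below is
# illustrative, not part of this patch.
from bs4 import BeautifulSoup


def missing_story_images(content, image_urls):
    # Return the story images absent from the text, matching an https src in
    # the fetched text against an http URL from the feed.
    remaining = list(image_urls)
    soup = BeautifulSoup(content, features="lxml")
    for img in soup.findAll("img"):
        src = img.get("src")
        if not src:
            continue
        if src in remaining:
            remaining.remove(src)
        elif src.replace("https:", "http:") in remaining:
            remaining.remove(src.replace("https:", "http:"))
    return remaining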
@@ -218,48 +253,55 @@ def add_hero_image(self, content, image_urls):
     def rewrite_content(self, content):
         soup = BeautifulSoup(content, features="lxml")
-        
-        for noscript in soup.findAll('noscript'):
+
+        for noscript in soup.findAll("noscript"):
             if len(noscript.contents) > 0:
                 noscript.replaceWith(noscript.contents[0])
-        
+
         content = str(soup)
-        
-        images = set([img.attrs['src'] for img in soup.findAll('img') if 'src' in img.attrs])
+
+        images = set([img.attrs["src"] for img in soup.findAll("img") if "src" in img.attrs])
         for image_url in images:
             abs_image_url = urljoin(self.story_url, image_url)
             content = content.replace(image_url, abs_image_url)
-        
+
         return content
-    
+
     @timelimit(10)
     def fetch_request(self, use_mercury=True):
         headers = self.headers
         url = self.story_url
-        
+
         if use_mercury:
-            mercury_api_key = getattr(settings, 'MERCURY_PARSER_API_KEY', 'abc123')
+            mercury_api_key = getattr(settings, "MERCURY_PARSER_API_KEY", "abc123")
             headers["content-type"] = "application/json"
             headers["x-api-key"] = mercury_api_key
             domain = Site.objects.get_current().domain
             protocol = "https"
             if settings.DOCKERBUILD:
-                domain = 'haproxy'
+                domain = "haproxy"
                 protocol = "http"
             url = f"{protocol}://{domain}/rss_feeds/original_text_fetcher?url={url}"
-        
+
         try:
             r = requests.get(url, headers=headers, timeout=15)
             r.connection.close()
-        except (AttributeError, SocketError, requests.ConnectionError,
-                requests.models.MissingSchema, requests.sessions.InvalidSchema,
-                requests.sessions.TooManyRedirects,
-                requests.models.InvalidURL,
-                requests.models.ChunkedEncodingError,
-                requests.models.ContentDecodingError,
-                requests.adapters.ReadTimeout,
-                urllib3.exceptions.LocationValueError,
-                LocationParseError, OpenSSLError, PyAsn1Error) as e:
+        except (
+            AttributeError,
+            SocketError,
+            requests.ConnectionError,
+            requests.models.MissingSchema,
+            requests.sessions.InvalidSchema,
+            requests.sessions.TooManyRedirects,
+            requests.models.InvalidURL,
+            requests.models.ChunkedEncodingError,
+            requests.models.ContentDecodingError,
+            requests.adapters.ReadTimeout,
+            urllib3.exceptions.LocationValueError,
+            LocationParseError,
+            OpenSSLError,
+            PyAsn1Error,
+        ) as e:
             logging.user(self.request, "~SN~FRFailed~FY to fetch ~FGoriginal text~FY: %s" % e)
             return
         return r
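When the Mercury parser yields nothing, fetch_manually() above falls back to readability. A minimal sketch of that fallback using the same readability.Document arguments as the patch; the wrapper function name is illustrative, not part of this patch:

import readability  # readability-lxml
import requests


def extract_article(url):
    # Fetch the page and let readability pick out the main content block,
    # biased toward the same positive keywords the patch passes.
    resp = requests.get(url, timeout=10)
    doc = readability.Document(
        resp.text,
        url=resp.url,
        positive_keywords="post, entry, postProp, article, postContent, postField",
    )
    return doc.summary(html_partial=True)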
diff --git a/apps/rss_feeds/urls.py b/apps/rss_feeds/urls.py
index f1d7e1f7fb..890b0c8144 100644
--- a/apps/rss_feeds/urls.py
+++ b/apps/rss_feeds/urls.py
@@ -2,19 +2,27 @@
 from apps.rss_feeds import views
 
 urlpatterns = [
-    url(r'^feed_autocomplete', views.feed_autocomplete, name='feed-autocomplete'),
-    url(r'^search_feed', views.search_feed, name='search-feed'),
-    url(r'^statistics/(?P<feed_id>\d+)', views.load_feed_statistics, name='feed-statistics'),
-    url(r'^statistics_embedded/(?P<feed_id>\d+)', views.load_feed_statistics_embedded, name='feed-statistics-embedded'),
-    url(r'^feed_settings/(?P<feed_id>\d+)', views.load_feed_settings, name='feed-settings'),
-    url(r'^feed/(?P<feed_id>\d+)/?', views.load_single_feed, name='feed-info'),
-    url(r'^icon/(?P<feed_id>\d+)/?', views.load_feed_favicon, name='feed-favicon'),
-    url(r'^exception_retry', views.exception_retry, name='exception-retry'),
-    url(r'^exception_change_feed_address', views.exception_change_feed_address, name='exception-change-feed-address'),
-    url(r'^exception_change_feed_link', views.exception_change_feed_link, name='exception-change-feed-link'),
-    url(r'^status', views.status, name='status'),
-    url(r'^load_single_feed', views.load_single_feed, name='feed-canonical'),
-    url(r'^original_text', views.original_text, name='original-text'),
-    url(r'^original_story', views.original_story, name='original-story'),
-    url(r'^story_changes', views.story_changes, name='story-changes'),
+    url(r"^feed_autocomplete", views.feed_autocomplete, name="feed-autocomplete"),
+    url(r"^search_feed", views.search_feed, name="search-feed"),
+    url(r"^statistics/(?P<feed_id>\d+)", views.load_feed_statistics, name="feed-statistics"),
+    url(
+        r"^statistics_embedded/(?P<feed_id>\d+)",
+        views.load_feed_statistics_embedded,
+        name="feed-statistics-embedded",
+    ),
+    url(r"^feed_settings/(?P<feed_id>\d+)", views.load_feed_settings, name="feed-settings"),
+    url(r"^feed/(?P<feed_id>\d+)/?", views.load_single_feed, name="feed-info"),
+    url(r"^icon/(?P<feed_id>\d+)/?", views.load_feed_favicon, name="feed-favicon"),
+    url(r"^exception_retry", views.exception_retry, name="exception-retry"),
+    url(
+        r"^exception_change_feed_address",
+        views.exception_change_feed_address,
+        name="exception-change-feed-address",
+    ),
+    url(r"^exception_change_feed_link", views.exception_change_feed_link, name="exception-change-feed-link"),
+    url(r"^status", views.status, name="status"),
+    url(r"^load_single_feed", views.load_single_feed, name="feed-canonical"),
+    url(r"^original_text", views.original_text, name="original-text"),
+    url(r"^original_story", views.original_story, name="original-story"),
+    url(r"^story_changes", views.story_changes, name="story-changes"),
 ]
diff --git a/apps/rss_feeds/views.py b/apps/rss_feeds/views.py
index 91a916b4a1..5a062a6e55 100644
--- a/apps/rss_feeds/views.py
+++ b/apps/rss_feeds/views.py
@@ -9,6 +9,7 @@
 from django.conf import settings
 from django.contrib.auth.decorators import login_required
 from django.contrib.auth.models import User
+
 # from django.db import IntegrityError
 from apps.rss_feeds.models import Feed, merge_feeds
 from apps.rss_feeds.models import MFetchHistory
@@ -35,16 +36,17 @@
     "latitude",
 ]
 
+
 @ajax_login_required
 @json.json_view
 def search_feed(request):
-    address = request.GET.get('address')
-    offset = int(request.GET.get('offset', 0))
+    address = request.GET.get("address")
+    offset = int(request.GET.get("offset", 0))
     if not address:
         return dict(code=-1, message="Please provide a URL/address.")
-    
+
     logging.user(request.user, "~FBFinding feed (search_feed): %s" % address)
-    ip = request.META.get('HTTP_X_FORWARDED_FOR', None) or request.META['REMOTE_ADDR']
+    ip = request.META.get("HTTP_X_FORWARDED_FOR", None) or request.META["REMOTE_ADDR"]
     logging.user(request.user, "~FBIP: %s" % ip)
     aggressive = request.user.is_authenticated
     feed = Feed.get_feed_from_url(address, create=False, aggressive=aggressive, offset=offset)
@@ -52,7 +54,8 @@ def search_feed(request):
         return feed.canonical()
     else:
         return dict(code=-1, message="No feed found matching that XML or website address.")
-    
+
+
 @json.json_view
 def load_single_feed(request, feed_id):
     user = get_user(request)
@@ -60,18 +63,20 @@ def load_single_feed(request, feed_id):
     classifiers = get_classifiers_for_user(user, feed_id=feed.pk)
 
     payload = feed.canonical(full=True)
-    payload['classifiers'] = classifiers
+    payload["classifiers"] = classifiers
 
     return payload
 
+
 def feed_favicon_etag(request, feed_id):
     try:
         feed_icon = MFeedIcon.objects.get(feed_id=feed_id)
     except MFeedIcon.DoesNotExist:
         return
-    
+
     return feed_icon.color
-    
+
+
 @condition(etag_func=feed_favicon_etag)
 def load_feed_favicon(request, feed_id):
     not_found = False
@@ -80,112 +85,126 @@ def load_feed_favicon(request, feed_id):
     except MFeedIcon.DoesNotExist:
         logging.user(request, "~FBNo feed icon found: %s" % feed_id)
         not_found = True
-    
+
     if not_found or not feed_icon.data:
-        return HttpResponseRedirect(settings.MEDIA_URL + 'img/icons/nouns/world.svg')
-    
+        return HttpResponseRedirect(settings.MEDIA_URL + "img/icons/nouns/world.svg")
+
     icon_data = base64.b64decode(feed_icon.data)
-    return HttpResponse(icon_data, content_type='image/png')
+    return HttpResponse(icon_data, content_type="image/png")
+
 
 @json.json_view
 def 
feed_autocomplete(request): - query = request.GET.get('term') or request.GET.get('query') - version = int(request.GET.get('v', 1)) - autocomplete_format = request.GET.get('format', 'autocomplete') - + query = request.GET.get("term") or request.GET.get("query") + version = int(request.GET.get("v", 1)) + autocomplete_format = request.GET.get("format", "autocomplete") + # user = get_user(request) # if True or not user.profile.is_premium: # return dict(code=-1, message="Overloaded, no autocomplete results.", feeds=[], term=query) - + if not query: return dict(code=-1, message="Specify a search 'term'.", feeds=[], term=query) - - if '.' in query: + + if "." in query: try: parts = urlparse(query) - if not parts.hostname and not query.startswith('http'): - parts = urlparse('http://%s' % query) + if not parts.hostname and not query.startswith("http"): + parts = urlparse("http://%s" % query) if parts.hostname: query = [parts.hostname] - query.extend([p for p in parts.path.split('/') if p]) - query = ' '.join(query) + query.extend([p for p in parts.path.split("/") if p]) + query = " ".join(query) except: logging.user(request, "~FGAdd search, could not parse url in ~FR%s" % query) - - query_params = query.split(' ') + + query_params = query.split(" ") tries_left = 5 while len(query_params) and tries_left: tries_left -= 1 - feed_ids = Feed.autocomplete(' '.join(query_params)) + feed_ids = Feed.autocomplete(" ".join(query_params)) if feed_ids: break else: query_params = query_params[:-1] - + feeds = list(set([Feed.get_by_id(feed_id) for feed_id in feed_ids])) feeds = [feed for feed in feeds if feed and not feed.branch_from_feed] feeds = [feed for feed in feeds if all([x not in feed.feed_address for x in IGNORE_AUTOCOMPLETE])] - - if autocomplete_format == 'autocomplete': - feeds = [{ - 'id': feed.pk, - 'value': feed.feed_address, - 'label': feed.feed_title, - 'tagline': feed.data and feed.data.feed_tagline, - 'num_subscribers': feed.num_subscribers, - } for feed in feeds] + + if autocomplete_format == "autocomplete": + feeds = [ + { + "id": feed.pk, + "value": feed.feed_address, + "label": feed.feed_title, + "tagline": feed.data and feed.data.feed_tagline, + "num_subscribers": feed.num_subscribers, + } + for feed in feeds + ] else: feeds = [feed.canonical(full=True) for feed in feeds] - feeds = sorted(feeds, key=lambda f: -1 * f['num_subscribers']) - - feed_ids = [f['id'] for f in feeds] + feeds = sorted(feeds, key=lambda f: -1 * f["num_subscribers"]) + + feed_ids = [f["id"] for f in feeds] feed_icons = dict((icon.feed_id, icon) for icon in MFeedIcon.objects.filter(feed_id__in=feed_ids)) - + for feed in feeds: - if feed['id'] in feed_icons: - feed_icon = feed_icons[feed['id']] + if feed["id"] in feed_icons: + feed_icon = feed_icons[feed["id"]] if feed_icon.data: - feed['favicon_color'] = feed_icon.color - feed['favicon'] = feed_icon.data + feed["favicon_color"] = feed_icon.color + feed["favicon"] = feed_icon.data + + logging.user( + request, + "~FGAdd Search: ~SB%s ~SN(%s matches)" + % ( + query, + len(feeds), + ), + ) - logging.user(request, "~FGAdd Search: ~SB%s ~SN(%s matches)" % (query, len(feeds),)) - if version > 1: return { - 'feeds': feeds, - 'term': query, + "feeds": feeds, + "term": query, } else: return feeds - + + @ratelimit(minutes=1, requests=30) @json.json_view def load_feed_statistics(request, feed_id): user = get_user(request) feed = get_object_or_404(Feed, pk=feed_id) stats = assemble_statistics(user, feed_id) - + logging.user(request, "~FBStatistics: ~SB%s" % (feed)) return stats + 
def load_feed_statistics_embedded(request, feed_id): user = get_user(request) feed = get_object_or_404(Feed, pk=feed_id) stats = assemble_statistics(user, feed_id) - + logging.user(request, "~FBStatistics (~FCembedded~FB): ~SB%s" % (feed)) - + return render( request, - 'rss_feeds/statistics.xhtml', + "rss_feeds/statistics.xhtml", { - 'stats': json.json_encode(stats), - 'feed_js': json.json_encode(feed.canonical()), - 'feed': feed, - } + "stats": json.json_encode(stats), + "feed_js": json.json_encode(feed.canonical()), + "feed": feed, + }, ) + def assemble_statistics(user, feed_id): user_timezone = user.profile.timezone stats = dict() @@ -194,76 +213,82 @@ def assemble_statistics(user, feed_id): feed.set_next_scheduled_update(verbose=True, skip_scheduling=True) feed.save_feed_story_history_statistics() feed.save_classifier_counts() - + # Dates of last and next update - stats['active'] = feed.active - stats['last_update'] = relative_timesince(feed.last_update) - stats['next_update'] = relative_timeuntil(feed.next_scheduled_update) - stats['push'] = feed.is_push - stats['fs_size_bytes'] = feed.fs_size_bytes - stats['archive_count'] = feed.archive_count + stats["active"] = feed.active + stats["last_update"] = relative_timesince(feed.last_update) + stats["next_update"] = relative_timeuntil(feed.next_scheduled_update) + stats["push"] = feed.is_push + stats["fs_size_bytes"] = feed.fs_size_bytes + stats["archive_count"] = feed.archive_count if feed.is_push: try: - stats['push_expires'] = localtime_for_timezone(feed.push.lease_expires, - user_timezone).strftime("%Y-%m-%d %H:%M:%S") + stats["push_expires"] = localtime_for_timezone(feed.push.lease_expires, user_timezone).strftime( + "%Y-%m-%d %H:%M:%S" + ) except PushSubscription.DoesNotExist: - stats['push_expires'] = 'Missing push' + stats["push_expires"] = "Missing push" feed.is_push = False feed.save() # Minutes between updates update_interval_minutes = feed.get_next_scheduled_update(force=True, verbose=False) - stats['update_interval_minutes'] = update_interval_minutes + stats["update_interval_minutes"] = update_interval_minutes original_active_premium_subscribers = feed.active_premium_subscribers original_premium_subscribers = feed.premium_subscribers - feed.active_premium_subscribers = max(feed.active_premium_subscribers+1, 1) + feed.active_premium_subscribers = max(feed.active_premium_subscribers + 1, 1) feed.premium_subscribers += 1 - premium_update_interval_minutes = feed.get_next_scheduled_update(force=True, verbose=False, - premium_speed=True) + premium_update_interval_minutes = feed.get_next_scheduled_update( + force=True, verbose=False, premium_speed=True + ) feed.active_premium_subscribers = original_active_premium_subscribers feed.premium_subscribers = original_premium_subscribers - stats['premium_update_interval_minutes'] = premium_update_interval_minutes - stats['errors_since_good'] = feed.errors_since_good - + stats["premium_update_interval_minutes"] = premium_update_interval_minutes + stats["errors_since_good"] = feed.errors_since_good + # Stories per month - average and month-by-month breakout - average_stories_per_month, story_count_history = feed.average_stories_per_month, feed.data.story_count_history - stats['average_stories_per_month'] = average_stories_per_month + average_stories_per_month, story_count_history = ( + feed.average_stories_per_month, + feed.data.story_count_history, + ) + stats["average_stories_per_month"] = average_stories_per_month story_count_history = story_count_history and 
json.decode(story_count_history) if story_count_history and isinstance(story_count_history, dict): - stats['story_count_history'] = story_count_history['months'] - stats['story_days_history'] = story_count_history['days'] - stats['story_hours_history'] = story_count_history['hours'] + stats["story_count_history"] = story_count_history["months"] + stats["story_days_history"] = story_count_history["days"] + stats["story_hours_history"] = story_count_history["hours"] else: - stats['story_count_history'] = story_count_history - + stats["story_count_history"] = story_count_history + # Rotate hours to match user's timezone offset localoffset = user_timezone.utcoffset(datetime.datetime.utcnow()) hours_offset = int(localoffset.total_seconds() / 3600) rotated_hours = {} - for hour, value in list(stats['story_hours_history'].items()): - rotated_hours[str(int(hour)+hours_offset)] = value - stats['story_hours_history'] = rotated_hours - + for hour, value in list(stats["story_hours_history"].items()): + rotated_hours[str(int(hour) + hours_offset)] = value + stats["story_hours_history"] = rotated_hours + # Subscribers - stats['subscriber_count'] = feed.num_subscribers - stats['num_subscribers'] = feed.num_subscribers - stats['stories_last_month'] = feed.stories_last_month - stats['last_load_time'] = feed.last_load_time - stats['premium_subscribers'] = feed.premium_subscribers - stats['active_subscribers'] = feed.active_subscribers - stats['active_premium_subscribers'] = feed.active_premium_subscribers + stats["subscriber_count"] = feed.num_subscribers + stats["num_subscribers"] = feed.num_subscribers + stats["stories_last_month"] = feed.stories_last_month + stats["last_load_time"] = feed.last_load_time + stats["premium_subscribers"] = feed.premium_subscribers + stats["active_subscribers"] = feed.active_subscribers + stats["active_premium_subscribers"] = feed.active_premium_subscribers # Classifier counts - stats['classifier_counts'] = json.decode(feed.data.feed_classifier_counts) - + stats["classifier_counts"] = json.decode(feed.data.feed_classifier_counts) + # Fetch histories fetch_history = MFetchHistory.feed(feed_id, timezone=user_timezone) - stats['feed_fetch_history'] = fetch_history['feed_fetch_history'] - stats['page_fetch_history'] = fetch_history['page_fetch_history'] - stats['feed_push_history'] = fetch_history['push_history'] - + stats["feed_fetch_history"] = fetch_history["feed_fetch_history"] + stats["page_fetch_history"] = fetch_history["page_fetch_history"] + stats["feed_push_history"] = fetch_history["push_history"] + return stats + @json.json_view def load_feed_settings(request, feed_id): stats = dict() @@ -272,25 +297,26 @@ def load_feed_settings(request, feed_id): timezone = user.profile.timezone fetch_history = MFetchHistory.feed(feed_id, timezone=timezone) - stats['feed_fetch_history'] = fetch_history['feed_fetch_history'] - stats['page_fetch_history'] = fetch_history['page_fetch_history'] - stats['feed_push_history'] = fetch_history['push_history'] - stats['duplicate_addresses'] = feed.duplicate_addresses.all() - + stats["feed_fetch_history"] = fetch_history["feed_fetch_history"] + stats["page_fetch_history"] = fetch_history["page_fetch_history"] + stats["feed_push_history"] = fetch_history["push_history"] + stats["duplicate_addresses"] = feed.duplicate_addresses.all() + return stats + @ratelimit(minutes=1, requests=30) @json.json_view def exception_retry(request): user = get_user(request) - feed_id = get_argument_or_404(request, 'feed_id') - reset_fetch = 
json.decode(request.POST['reset_fetch']) + feed_id = get_argument_or_404(request, "feed_id") + reset_fetch = json.decode(request.POST["reset_fetch"]) feed = Feed.get_by_id(feed_id) original_feed = feed - + if not feed: raise Http404 - + feed.schedule_feed_fetch_immediately() changed = False if feed.has_page_exception: @@ -303,18 +329,18 @@ def exception_retry(request): changed = True feed.active = True if changed: - feed.save(update_fields=['has_page_exception', 'has_feed_exception', 'active']) - + feed.save(update_fields=["has_page_exception", "has_feed_exception", "active"]) + original_fetched_once = feed.fetched_once if reset_fetch: logging.user(request, "~FRRefreshing exception feed: ~SB%s" % (feed)) feed.fetched_once = False else: logging.user(request, "~FRForcing refreshing feed: ~SB%s" % (feed)) - + feed.fetched_once = True if feed.fetched_once != original_fetched_once: - feed.save(update_fields=['fetched_once']) + feed.save(update_fields=["fetched_once"]) feed = feed.update(force=True, compute_scores=False, verbose=True) feed = Feed.get_by_id(feed.pk) @@ -327,26 +353,30 @@ def exception_retry(request): usersub = usersubs[0] usersub.switch_feed(feed, original_feed) else: - return {'code': -1} + return {"code": -1} usersub.calculate_feed_scores(silent=False) - + feeds = {feed.pk: usersub and usersub.canonical(full=True), feed_id: usersub.canonical(full=True)} - return {'code': 1, 'feeds': feeds} - - + return {"code": 1, "feeds": feeds} + + @ajax_login_required @json.json_view def exception_change_feed_address(request): - feed_id = request.POST['feed_id'] + feed_id = request.POST["feed_id"] feed = get_object_or_404(Feed, pk=feed_id) original_feed = feed - feed_address = request.POST['feed_address'] + feed_address = request.POST["feed_address"] timezone = request.user.profile.timezone code = -1 if False and (feed.has_page_exception or feed.has_feed_exception): # Fix broken feed - logging.user(request, "~FRFixing feed exception by address: %s - ~SB%s~SN to ~SB%s" % (feed, feed.feed_address, feed_address)) + logging.user( + request, + "~FRFixing feed exception by address: %s - ~SB%s~SN to ~SB%s" + % (feed, feed.feed_address, feed_address), + ) feed.has_feed_exception = False feed.active = True feed.fetched_once = False @@ -364,9 +394,13 @@ def exception_change_feed_address(request): merge_feeds(new_feed.pk, feed.pk) else: # Branch good feed - logging.user(request, "~FRBranching feed by address: ~SB%s~SN to ~SB%s" % (feed.feed_address, feed_address)) + logging.user( + request, "~FRBranching feed by address: ~SB%s~SN to ~SB%s" % (feed.feed_address, feed_address) + ) try: - feed = Feed.objects.get(hash_address_and_link=Feed.generate_hash_address_and_link(feed_address, feed.feed_link)) + feed = Feed.objects.get( + hash_address_and_link=Feed.generate_hash_address_and_link(feed_address, feed.feed_link) + ) except Feed.DoesNotExist: feed = Feed.objects.create(feed_address=feed_address, feed_link=feed.feed_link) code = 1 @@ -390,47 +424,50 @@ def exception_change_feed_address(request): else: fetch_history = MFetchHistory.feed(feed_id, timezone=timezone) return { - 'code': -1, - 'feed_fetch_history': fetch_history['feed_fetch_history'], - 'page_fetch_history': fetch_history['page_fetch_history'], - 'push_history': fetch_history['push_history'], + "code": -1, + "feed_fetch_history": fetch_history["feed_fetch_history"], + "page_fetch_history": fetch_history["page_fetch_history"], + "push_history": fetch_history["push_history"], } usersub.calculate_feed_scores(silent=False) - + 
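    # The branch above dedupes feeds through Feed.generate_hash_address_and_link:
    # a new Feed row is created only when no existing feed shares the exact
    # (feed_address, feed_link) pair. A minimal sketch of such a dedup key,
    # assuming it is a digest over the two URLs concatenated (illustrative
    # helper only; the real implementation may differ):
    def _hash_address_and_link_sketch(feed_address, feed_link):
        import hashlib

        combined = (feed_address or "") + (feed_link or "")
        return hashlib.sha1(combined.encode("utf-8")).hexdigest()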
feed.update_all_statistics() classifiers = get_classifiers_for_user(usersub.user, feed_id=usersub.feed_id) - + feeds = { - original_feed.pk: usersub and usersub.canonical(full=True, classifiers=classifiers), + original_feed.pk: usersub and usersub.canonical(full=True, classifiers=classifiers), } - + if feed and feed.has_feed_exception: code = -1 fetch_history = MFetchHistory.feed(feed_id, timezone=timezone) return { - 'code': code, - 'feeds': feeds, - 'new_feed_id': usersub.feed_id, - 'feed_fetch_history': fetch_history['feed_fetch_history'], - 'page_fetch_history': fetch_history['page_fetch_history'], - 'push_history': fetch_history['push_history'], + "code": code, + "feeds": feeds, + "new_feed_id": usersub.feed_id, + "feed_fetch_history": fetch_history["feed_fetch_history"], + "page_fetch_history": fetch_history["page_fetch_history"], + "push_history": fetch_history["push_history"], } - + + @ajax_login_required @json.json_view def exception_change_feed_link(request): - feed_id = request.POST['feed_id'] + feed_id = request.POST["feed_id"] feed = get_object_or_404(Feed, pk=feed_id) original_feed = feed - feed_link = request.POST['feed_link'] + feed_link = request.POST["feed_link"] timezone = request.user.profile.timezone code = -1 - + if False and (feed.has_page_exception or feed.has_feed_exception): # Fix broken feed - logging.user(request, "~FRFixing feed exception by link: ~SB%s~SN to ~SB%s" % (feed.feed_link, feed_link)) + logging.user( + request, "~FRFixing feed exception by link: ~SB%s~SN to ~SB%s" % (feed.feed_link, feed_link) + ) found_feed_urls = feedfinder.find_feeds(feed_link) if len(found_feed_urls): code = 1 @@ -451,7 +488,9 @@ def exception_change_feed_link(request): # Branch good feed logging.user(request, "~FRBranching feed by link: ~SB%s~SN to ~SB%s" % (feed.feed_link, feed_link)) try: - feed = Feed.objects.get(hash_address_and_link=Feed.generate_hash_address_and_link(feed.feed_address, feed_link)) + feed = Feed.objects.get( + hash_address_and_link=Feed.generate_hash_address_and_link(feed.feed_address, feed_link) + ) except Feed.DoesNotExist: feed = Feed.objects.create(feed_address=feed.feed_address, feed_link=feed_link) code = 1 @@ -476,81 +515,82 @@ def exception_change_feed_link(request): else: fetch_history = MFetchHistory.feed(feed_id, timezone=timezone) return { - 'code': -1, - 'feed_fetch_history': fetch_history['feed_fetch_history'], - 'page_fetch_history': fetch_history['page_fetch_history'], - 'push_history': fetch_history['push_history'], + "code": -1, + "feed_fetch_history": fetch_history["feed_fetch_history"], + "page_fetch_history": fetch_history["page_fetch_history"], + "push_history": fetch_history["push_history"], } - + usersub.calculate_feed_scores(silent=False) - + feed.update_all_statistics() classifiers = get_classifiers_for_user(usersub.user, feed_id=usersub.feed_id) - + if feed and feed.has_feed_exception: code = -1 - + feeds = { - original_feed.pk: usersub.canonical(full=True, classifiers=classifiers), + original_feed.pk: usersub.canonical(full=True, classifiers=classifiers), } fetch_history = MFetchHistory.feed(feed_id, timezone=timezone) return { - 'code': code, - 'feeds': feeds, - 'new_feed_id': usersub.feed_id, - 'feed_fetch_history': fetch_history['feed_fetch_history'], - 'page_fetch_history': fetch_history['page_fetch_history'], - 'push_history': fetch_history['push_history'], + "code": code, + "feeds": feeds, + "new_feed_id": usersub.feed_id, + "feed_fetch_history": fetch_history["feed_fetch_history"], + "page_fetch_history": 
fetch_history["page_fetch_history"], + "push_history": fetch_history["push_history"], } + @login_required def status(request): if not request.user.is_staff and not settings.DEBUG: logging.user(request, "~SKNON-STAFF VIEWING RSS FEEDS STATUS!") assert False return HttpResponseForbidden() - minutes = int(request.GET.get('minutes', 1)) - now = datetime.datetime.now() + minutes = int(request.GET.get("minutes", 1)) + now = datetime.datetime.now() hour_ago = now + datetime.timedelta(minutes=minutes) - username = request.GET.get('user', '') or request.GET.get('username', '') + username = request.GET.get("user", "") or request.GET.get("username", "") if username == "all": - feeds = Feed.objects.filter(next_scheduled_update__lte=hour_ago).order_by('next_scheduled_update') + feeds = Feed.objects.filter(next_scheduled_update__lte=hour_ago).order_by("next_scheduled_update") else: if username: user = User.objects.get(username=username) else: user = request.user usersubs = UserSubscription.objects.filter(user=user) - feed_ids = usersubs.values('feed_id') + feed_ids = usersubs.values("feed_id") if minutes > 0: - feeds = Feed.objects.filter(pk__in=feed_ids, next_scheduled_update__lte=hour_ago).order_by('next_scheduled_update') + feeds = Feed.objects.filter(pk__in=feed_ids, next_scheduled_update__lte=hour_ago).order_by( + "next_scheduled_update" + ) else: - feeds = Feed.objects.filter(pk__in=feed_ids, last_update__gte=hour_ago).order_by('-last_update') - + feeds = Feed.objects.filter(pk__in=feed_ids, last_update__gte=hour_ago).order_by("-last_update") + r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) queues = { - 'tasked_feeds': r.zcard('tasked_feeds'), - 'queued_feeds': r.scard('queued_feeds'), - 'scheduled_updates': r.zcard('scheduled_updates'), + "tasked_feeds": r.zcard("tasked_feeds"), + "queued_feeds": r.scard("queued_feeds"), + "scheduled_updates": r.zcard("scheduled_updates"), } - return render(request, 'rss_feeds/status.xhtml', { - 'feeds': feeds, - 'queues': queues - }) + return render(request, "rss_feeds/status.xhtml", {"feeds": feeds, "queues": queues}) + @json.json_view def original_text(request): # iOS sends a POST, web sends a GET GET_POST = getattr(request, request.method) - story_id = GET_POST.get('story_id') - feed_id = GET_POST.get('feed_id') - story_hash = GET_POST.get('story_hash', None) - force = GET_POST.get('force', False) - debug = GET_POST.get('debug', False) + story_id = GET_POST.get("story_id") + feed_id = GET_POST.get("feed_id") + story_hash = GET_POST.get("story_hash", None) + force = GET_POST.get("force", False) + debug = GET_POST.get("debug", False) if not story_hash and not story_id: - return {'code': -1, 'message': 'Missing story_hash.', 'original_text': None, 'failed': True} - + return {"code": -1, "message": "Missing story_hash.", "original_text": None, "failed": True} + if story_hash: story, _ = MStory.find_story(story_hash=story_hash) else: @@ -558,25 +598,26 @@ def original_text(request): if not story: logging.user(request, "~FYFetching ~FGoriginal~FY story text: ~FRstory not found") - return {'code': -1, 'message': 'Story not found.', 'original_text': None, 'failed': True} - + return {"code": -1, "message": "Story not found.", "original_text": None, "failed": True} + original_text = story.fetch_original_text(force=force, request=request, debug=debug) return { - 'feed_id': story.story_feed_id, - 'story_hash': story.story_hash, - 'story_id': story.story_guid, - 'image_urls': story.image_urls, - 'secure_image_urls': 
Feed.secure_image_urls(story.image_urls), - 'original_text': original_text, - 'failed': not original_text or len(original_text) < 100, + "feed_id": story.story_feed_id, + "story_hash": story.story_hash, + "story_id": story.story_guid, + "image_urls": story.image_urls, + "secure_image_urls": Feed.secure_image_urls(story.image_urls), + "original_text": original_text, + "failed": not original_text or len(original_text) < 100, } -@required_params('story_hash', method="GET") + +@required_params("story_hash", method="GET") def original_story(request): - story_hash = request.GET.get('story_hash') - force = request.GET.get('force', False) - debug = request.GET.get('debug', False) + story_hash = request.GET.get("story_hash") + force = request.GET.get("force", False) + debug = request.GET.get("debug", False) story, _ = MStory.find_story(story_hash=story_hash) @@ -584,22 +625,20 @@ def original_story(request): logging.user(request, "~FYFetching ~FGoriginal~FY story page: ~FRstory not found") # return {'code': -1, 'message': 'Story not found.', 'original_page': None, 'failed': True} raise Http404 - + original_page = story.fetch_original_page(force=force, request=request, debug=debug) return HttpResponse(original_page or "") -@required_params('story_hash', method="GET") + +@required_params("story_hash", method="GET") @json.json_view def story_changes(request): - story_hash = request.GET.get('story_hash', None) - show_changes = is_true(request.GET.get('show_changes', True)) + story_hash = request.GET.get("story_hash", None) + show_changes = is_true(request.GET.get("show_changes", True)) story, _ = MStory.find_story(story_hash=story_hash) if not story: logging.user(request, "~FYFetching ~FGoriginal~FY story page: ~FRstory not found") - return {'code': -1, 'message': 'Story not found.', 'original_page': None, 'failed': True} - - return { - 'story': Feed.format_story(story, show_changes=show_changes) - } - \ No newline at end of file + return {"code": -1, "message": "Story not found.", "original_page": None, "failed": True} + + return {"story": Feed.format_story(story, show_changes=show_changes)} diff --git a/apps/search/management/commands/index_feeds.py b/apps/search/management/commands/index_feeds.py index c3e2ee37dc..92623d723d 100644 --- a/apps/search/management/commands/index_feeds.py +++ b/apps/search/management/commands/index_feeds.py @@ -1,14 +1,22 @@ from django.core.management.base import BaseCommand from apps.rss_feeds.models import Feed -class Command(BaseCommand): +class Command(BaseCommand): def add_arguments(self, parser): - parser.add_argument("-o", "--offset", dest="offset", type=int, default=0, help="Specify offset to start at") - parser.add_argument("-s", "--subscribers", dest="subscribers", type=int, default=2, help="Specify minimum number of subscribers") + parser.add_argument( + "-o", "--offset", dest="offset", type=int, default=0, help="Specify offset to start at" + ) + parser.add_argument( + "-s", + "--subscribers", + dest="subscribers", + type=int, + default=2, + help="Specify minimum number of subscribers", + ) def handle(self, *args, **options): - offset = options['offset'] - subscribers = options.get('subscribers', None) + offset = options["offset"] + subscribers = options.get("subscribers", None) Feed.index_all_for_search(offset=offset, subscribers=subscribers) - \ No newline at end of file diff --git a/apps/search/management/commands/index_stories.py b/apps/search/management/commands/index_stories.py index b63faa98c4..7a673b0c9f 100644 --- 
a/apps/search/management/commands/index_stories.py +++ b/apps/search/management/commands/index_stories.py @@ -4,33 +4,33 @@ from apps.rss_feeds.models import Feed, MStory from apps.reader.models import UserSubscription -class Command(BaseCommand): +class Command(BaseCommand): def add_arguments(self, parser): parser.add_argument("-u", "--user", dest="user", type=str, help="Specify user id or username") - parser.add_argument("-R", "--reindex", dest="reindex", action="store_true", help="Drop index and reindex all stories.") - + parser.add_argument( + "-R", "--reindex", dest="reindex", action="store_true", help="Drop index and reindex all stories." + ) def handle(self, *args, **options): - if options['reindex']: + if options["reindex"]: MStory.index_all_for_search() return - - if not options['user']: + + if not options["user"]: print("Missing user. Did you want to reindex everything? Use -R.") return - - if re.match(r"([0-9]+)", options['user']): - user = User.objects.get(pk=int(options['user'])) + + if re.match(r"([0-9]+)", options["user"]): + user = User.objects.get(pk=int(options["user"])) else: - user = User.objects.get(username=options['user']) - + user = User.objects.get(username=options["user"]) + subscriptions = UserSubscription.objects.filter(user=user) print(" ---> Indexing %s feeds..." % subscriptions.count()) - + for sub in subscriptions: try: sub.feed.index_stories_for_search() except Feed.DoesNotExist: print(" ***> Couldn't find %s" % sub.feed_id) - \ No newline at end of file diff --git a/apps/search/models.py b/apps/search/models.py index d4a6b3fe5b..59479f8741 100644 --- a/apps/search/models.py +++ b/apps/search/models.py @@ -17,32 +17,33 @@ from utils import log as logging from utils.feed_functions import chunks + class MUserSearch(mongo.Document): - '''Search index state of a user's subscriptions.''' - user_id = mongo.IntField(unique=True) - last_search_date = mongo.DateTimeField() - subscriptions_indexed = mongo.BooleanField() - subscriptions_indexing = mongo.BooleanField() - + """Search index state of a user's subscriptions.""" + + user_id = mongo.IntField(unique=True) + last_search_date = mongo.DateTimeField() + subscriptions_indexed = mongo.BooleanField() + subscriptions_indexing = mongo.BooleanField() + meta = { - 'collection': 'user_search', - 'indexes': ['user_id'], - 'allow_inheritance': False, + "collection": "user_search", + "indexes": ["user_id"], + "allow_inheritance": False, } - + @classmethod def get_user(cls, user_id, create=True): try: - user_search = cls.objects.read_preference(pymongo.ReadPreference.PRIMARY)\ - .get(user_id=user_id) + user_search = cls.objects.read_preference(pymongo.ReadPreference.PRIMARY).get(user_id=user_id) except cls.DoesNotExist: if create: user_search = cls.objects.create(user_id=user_id) else: user_search = None - + return user_search - + def touch_search_date(self): if not self.subscriptions_indexed and not self.subscriptions_indexing: self.schedule_index_subscriptions_for_search() @@ -52,62 +53,63 @@ def touch_search_date(self): self.save() def schedule_index_subscriptions_for_search(self): - IndexSubscriptionsForSearch.apply_async(kwargs=dict(user_id=self.user_id), - queue='search_indexer') - + IndexSubscriptionsForSearch.apply_async(kwargs=dict(user_id=self.user_id), queue="search_indexer") + # Should be run as a background task def index_subscriptions_for_search(self): from apps.rss_feeds.models import Feed from apps.reader.models import UserSubscription - + SearchStory.create_elasticsearch_mapping() - + start = time.time() 
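        # From here the work fans out to Celery: the user's feed ids are split
        # into small chunks, each chunk becomes an IndexSubscriptionsChunkForSearch
        # task, and a chord fires FinishIndexSubscriptionsForSearch once every
        # chunk completes, publishing progress to the client over Redis pub/sub.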
user = User.objects.get(pk=self.user_id) r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish(user.username, 'search_index_complete:start') - - subscriptions = UserSubscription.objects.filter(user=user).only('feed') + r.publish(user.username, "search_index_complete:start") + + subscriptions = UserSubscription.objects.filter(user=user).only("feed") total = subscriptions.count() - + feed_ids = [] for sub in subscriptions: try: feed_ids.append(sub.feed.pk) except Feed.DoesNotExist: continue - + feed_id_chunks = [c for c in chunks(feed_ids, 6)] - logging.user(user, "~FCIndexing ~SB%s feeds~SN in %s chunks..." % - (total, len(feed_id_chunks))) - - search_chunks = [IndexSubscriptionsChunkForSearch.s(feed_ids=feed_id_chunk, - user_id=self.user_id - ).set(queue='search_indexer') - for feed_id_chunk in feed_id_chunks] - callback = FinishIndexSubscriptionsForSearch.s(user_id=self.user_id, - start=start).set(queue='search_indexer') + logging.user(user, "~FCIndexing ~SB%s feeds~SN in %s chunks..." % (total, len(feed_id_chunks))) + + search_chunks = [ + IndexSubscriptionsChunkForSearch.s(feed_ids=feed_id_chunk, user_id=self.user_id).set( + queue="search_indexer" + ) + for feed_id_chunk in feed_id_chunks + ] + callback = FinishIndexSubscriptionsForSearch.s(user_id=self.user_id, start=start).set( + queue="search_indexer" + ) celery.chord(search_chunks)(callback) def finish_index_subscriptions_for_search(self, start): from apps.reader.models import UserSubscription - + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) user = User.objects.get(pk=self.user_id) - subscriptions = UserSubscription.objects.filter(user=user).only('feed') + subscriptions = UserSubscription.objects.filter(user=user).only("feed") total = subscriptions.count() duration = time.time() - start - logging.user(user, "~FCIndexed ~SB%s feeds~SN in ~FM~SB%s~FC~SN sec." % - (total, round(duration, 2))) - r.publish(user.username, 'search_index_complete:done') - + logging.user(user, "~FCIndexed ~SB%s feeds~SN in ~FM~SB%s~FC~SN sec." % (total, round(duration, 2))) + r.publish(user.username, "search_index_complete:done") + self.subscriptions_indexed = True self.subscriptions_indexing = False self.save() - + def index_subscriptions_chunk_for_search(self, feed_ids): from apps.rss_feeds.models import Feed + r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) user = User.objects.get(pk=self.user_id) @@ -115,40 +117,41 @@ def index_subscriptions_chunk_for_search(self, feed_ids): for feed_id in feed_ids: feed = Feed.get_by_id(feed_id) - if not feed: continue - + if not feed: + continue + feed.index_stories_for_search() - - r.publish(user.username, 'search_index_complete:feeds:%s' % - ','.join([str(f) for f in feed_ids])) - + + r.publish(user.username, "search_index_complete:feeds:%s" % ",".join([str(f) for f in feed_ids])) + @classmethod def schedule_index_feeds_for_search(cls, feed_ids, user_id): user_search = cls.get_user(user_id, create=False) - if (not user_search or - not user_search.subscriptions_indexed or - user_search.subscriptions_indexing): + if not user_search or not user_search.subscriptions_indexed or user_search.subscriptions_indexing: # User hasn't searched before. 
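            # Or an index build is already in flight. Either way there is no
            # finished index to keep fresh, so skip the per-feed reindex.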
return - + if not isinstance(feed_ids, list): feed_ids = [feed_ids] - IndexFeedsForSearch.apply_async(kwargs=dict(feed_ids=feed_ids, user_id=user_id), - queue='search_indexer') - + IndexFeedsForSearch.apply_async( + kwargs=dict(feed_ids=feed_ids, user_id=user_id), queue="search_indexer" + ) + @classmethod def index_feeds_for_search(cls, feed_ids, user_id): from apps.rss_feeds.models import Feed + user = User.objects.get(pk=user_id) logging.user(user, "~SB~FCIndexing %s~FC by request..." % feed_ids) for feed_id in feed_ids: feed = Feed.get_by_id(feed_id) - if not feed: continue - + if not feed: + continue + feed.index_stories_for_search() - + @classmethod def remove_all(cls, drop_index=False): # You only need to drop the index if there is data you want to clear. @@ -156,7 +159,7 @@ def remove_all(cls, drop_index=False): if drop_index: logging.info(" ---> ~FRRemoving stories search index...") SearchStory.drop() - + user_searches = cls.objects.all() logging.info(" ---> ~SN~FRRemoving ~SB%s~SN user searches..." % user_searches.count()) for user_search in user_searches: @@ -164,7 +167,7 @@ def remove_all(cls, drop_index=False): user_search.remove() except Exception as e: print(" ****> Error on search removal: %s" % e) - + def remove(self): from apps.rss_feeds.models import Feed from apps.reader.models import UserSubscription @@ -173,7 +176,7 @@ def remove(self): subscriptions = UserSubscription.objects.filter(user=self.user_id) total = subscriptions.count() removed = 0 - + for sub in subscriptions: try: feed = sub.feed @@ -184,33 +187,36 @@ def remove(self): feed.search_indexed = False feed.save() removed += 1 - - logging.user(user, "~FCRemoved ~SB%s/%s feed's search indexes~SN for ~SB~FB%s~FC~SN." % - (removed, total, user.username)) + + logging.user( + user, + "~FCRemoved ~SB%s/%s feed's search indexes~SN for ~SB~FB%s~FC~SN." 
+ % (removed, total, user.username), + ) self.delete() + class SearchStory: - _es_client = None name = "stories" - + @classmethod def ES(cls): if cls._es_client is None: cls._es_client = elasticsearch.Elasticsearch(settings.ELASTICSEARCH_STORY_HOST) cls.create_elasticsearch_mapping() return cls._es_client - + @classmethod def index_name(cls): return "%s-index" % cls.name - + @classmethod def doc_type(cls): - if settings.DOCKERBUILD or getattr(settings, 'ES_IGNORE_TYPE', True): + if settings.DOCKERBUILD or getattr(settings, "ES_IGNORE_TYPE", True): return None return "%s-type" % cls.name - + @classmethod def create_elasticsearch_mapping(cls, delete=False): if delete: @@ -222,83 +228,76 @@ def create_elasticsearch_mapping(cls, delete=False): if cls.ES().indices.exists(cls.index_name()): return - + try: cls.ES().indices.create(cls.index_name()) logging.debug(" ---> ~FCCreating search index for ~FM%s" % cls.index_name()) except elasticsearch.exceptions.RequestError as e: logging.debug(" ***> ~FRCould not create search index for ~FM%s: %s" % (cls.index_name(), e)) return - except (elasticsearch.exceptions.ConnectionError, - urllib3.exceptions.NewConnectionError, - urllib3.exceptions.ConnectTimeoutError) as e: - logging.debug( - f" ***> ~FRNo search server available for creating story mapping: {e}") + except ( + elasticsearch.exceptions.ConnectionError, + urllib3.exceptions.NewConnectionError, + urllib3.exceptions.ConnectTimeoutError, + ) as e: + logging.debug(f" ***> ~FRNo search server available for creating story mapping: {e}") return mapping = { - 'title': { - 'store': False, - 'type': 'text', - 'analyzer': 'snowball', + "title": { + "store": False, + "type": "text", + "analyzer": "snowball", "term_vector": "yes", }, - 'content': { - 'store': False, - 'type': 'text', - 'analyzer': 'snowball', + "content": { + "store": False, + "type": "text", + "analyzer": "snowball", "term_vector": "yes", }, - 'tags': { - 'store': False, + "tags": { + "store": False, "type": "text", - "fields": { - "raw": { - "type": "text", - "analyzer": "keyword", - "term_vector": "yes" - } - } + "fields": {"raw": {"type": "text", "analyzer": "keyword", "term_vector": "yes"}}, }, - 'author': { - 'store': False, - 'type': 'text', - 'analyzer': 'default', + "author": { + "store": False, + "type": "text", + "analyzer": "default", }, - 'feed_id': { - 'store': False, - 'type': 'integer' + "feed_id": {"store": False, "type": "integer"}, + "date": { + "store": False, + "type": "date", }, - 'date': { - 'store': False, - 'type': 'date', - } } - cls.ES().indices.put_mapping(body={ - 'properties': mapping, - }, index=cls.index_name()) + cls.ES().indices.put_mapping( + body={ + "properties": mapping, + }, + index=cls.index_name(), + ) cls.ES().indices.flush(cls.index_name()) @classmethod - def index(cls, story_hash, story_title, story_content, story_tags, story_author, story_feed_id, - story_date): + def index( + cls, story_hash, story_title, story_content, story_tags, story_author, story_feed_id, story_date + ): cls.create_elasticsearch_mapping() doc = { "content": story_content, "title": story_title, - "tags": ', '.join(story_tags), + "tags": ", ".join(story_tags), "author": story_author, "feed_id": story_feed_id, "date": story_date, } try: - cls.ES().create(index=cls.index_name(), id=story_hash, - body=doc, doc_type=cls.doc_type()) - except (elasticsearch.exceptions.ConnectionError, - urllib3.exceptions.NewConnectionError) as e: - logging.debug( - f" ***> ~FRNo search server available for story indexing: {e}") + 
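            # elasticsearch-py raises ConflictError when this id (the story hash)
            # already exists in the index; that case is caught and logged below.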
cls.ES().create(index=cls.index_name(), id=story_hash, body=doc, doc_type=cls.doc_type()) + except (elasticsearch.exceptions.ConnectionError, urllib3.exceptions.NewConnectionError) as e: + logging.debug(f" ***> ~FRNo search server available for story indexing: {e}") except elasticsearch.exceptions.ConflictError as e: logging.debug(f" ***> ~FBAlready indexed story: {e}") # if settings.DEBUG: @@ -312,10 +311,10 @@ def remove(cls, story_hash): try: cls.ES().delete(index=cls.index_name(), id=story_hash, doc_type=cls.doc_type()) except elasticsearch.exceptions.NotFoundError: - cls.ES().delete(index=cls.index_name(), id=story_hash, doc_type='story-type') + cls.ES().delete(index=cls.index_name(), id=story_hash, doc_type="story-type") except elasticsearch.exceptions.NotFoundError as e: logging.debug(f" ***> ~FRNo search server available for story deletion: {e}") - + @classmethod def drop(cls): try: @@ -323,7 +322,6 @@ def drop(cls): except elasticsearch.exceptions.NotFoundError: logging.debug(" ***> ~FBNo index found, nothing to drop.") - @classmethod def query(cls, feed_ids, query, order, offset, limit, strip=False): try: @@ -331,26 +329,26 @@ def query(cls, feed_ids, query, order, offset, limit, strip=False): except elasticsearch.exceptions.NotFoundError as e: logging.debug(f" ***> ~FRNo search server available: {e}") return [] - + if strip: - query = re.sub(r'([^\s\w_\-])+', ' ', query) # Strip non-alphanumeric + query = re.sub(r"([^\s\w_\-])+", " ", query) # Strip non-alphanumeric query = html.unescape(query) body = { "query": { "bool": { "must": [ - {"query_string": { "query": query, "default_operator": "AND" }}, - {"terms": { "feed_id": feed_ids[:2000] }}, + {"query_string": {"query": query, "default_operator": "AND"}}, + {"terms": {"feed_id": feed_ids[:2000]}}, ] } }, - 'sort': [{'date': {'order': 'desc' if order == "newest" else "asc"}}], - 'from': offset, - 'size': limit + "sort": [{"date": {"order": "desc" if order == "newest" else "asc"}}], + "from": offset, + "size": limit, } try: - results = cls.ES().search(body=body, index=cls.index_name(), doc_type=cls.doc_type()) + results = cls.ES().search(body=body, index=cls.index_name(), doc_type=cls.doc_type()) except elasticsearch.exceptions.RequestError as e: logging.debug(" ***> ~FRNo search server available for querying: %s" % e) return [] @@ -373,44 +371,46 @@ def query(cls, feed_ids, query, order, offset, limit, strip=False): # logging.debug(" ***> ~FRNo search server available.") # return [] - logging.info(" ---> ~FG~SNSearch ~FCstories~FG for: ~SB%s~SN, ~SB%s~SN results (across %s feed%s)" % - (query, len(results['hits']['hits']), len(feed_ids), 's' if len(feed_ids) != 1 else '')) - + logging.info( + " ---> ~FG~SNSearch ~FCstories~FG for: ~SB%s~SN, ~SB%s~SN results (across %s feed%s)" + % (query, len(results["hits"]["hits"]), len(feed_ids), "s" if len(feed_ids) != 1 else "") + ) + try: - result_ids = [r['_id'] for r in results['hits']['hits']] + result_ids = [r["_id"] for r in results["hits"]["hits"]] except Exception as e: - logging.info(" ---> ~FRInvalid search query \"%s\": %s" % (query, e)) + logging.info(' ---> ~FRInvalid search query "%s": %s' % (query, e)) return [] - + return result_ids - + @classmethod def global_query(cls, query, order, offset, limit, strip=False): cls.create_elasticsearch_mapping() cls.ES().indices.flush() - + if strip: - query = re.sub(r'([^\s\w_\-])+', ' ', query) # Strip non-alphanumeric + query = re.sub(r"([^\s\w_\-])+", " ", query) # Strip non-alphanumeric query = html.unescape(query) body = { "query": 
{ "bool": { "must": [ - {"query_string": { "query": query, "default_operator": "AND" }}, + {"query_string": {"query": query, "default_operator": "AND"}}, ] } }, - 'sort': [{'date': {'order': 'desc' if order == "newest" else "asc"}}], - 'from': offset, - 'size': limit + "sort": [{"date": {"order": "desc" if order == "newest" else "asc"}}], + "from": offset, + "size": limit, } try: - results = cls.ES().search(body=body, index=cls.index_name(), doc_type=cls.doc_type()) + results = cls.ES().search(body=body, index=cls.index_name(), doc_type=cls.doc_type()) except elasticsearch.exceptions.RequestError as e: logging.debug(" ***> ~FRNo search server available for querying: %s" % e) return [] - + # sort = "date:desc" if order == "newest" else "date:asc" # string_q = pyes.query.QueryStringQuery(query, default_operator="AND") # try: @@ -420,17 +420,16 @@ def global_query(cls, query, order, offset, limit, strip=False): # logging.debug(" ***> ~FRNo search server available.") # return [] - logging.info(" ---> ~FG~SNSearch ~FCstories~FG for: ~SB%s~SN (across all feeds)" % - (query)) - + logging.info(" ---> ~FG~SNSearch ~FCstories~FG for: ~SB%s~SN (across all feeds)" % (query)) + try: - result_ids = [r['_id'] for r in results['hits']['hits']] + result_ids = [r["_id"] for r in results["hits"]["hits"]] except Exception as e: - logging.info(" ---> ~FRInvalid search query \"%s\": %s" % (query, e)) + logging.info(' ---> ~FRInvalid search query "%s": %s' % (query, e)) return [] - + return result_ids - + @classmethod def more_like_this(cls, feed_ids, story_hash, order, offset, limit): try: @@ -438,52 +437,54 @@ def more_like_this(cls, feed_ids, story_hash, order, offset, limit): except elasticsearch.exceptions.NotFoundError as e: logging.debug(f" ***> ~FRNo search server available: {e}") return [] - + body = { "query": { "bool": { - "filter": [{ - "more_like_this": { - "fields": [ "title", "content" ], - "like": [ - { - "_index": cls.index_name(), - "_id": story_hash, - } - ], - "min_term_freq": 3, - "min_doc_freq": 2, - "min_word_length": 4, + "filter": [ + { + "more_like_this": { + "fields": ["title", "content"], + "like": [ + { + "_index": cls.index_name(), + "_id": story_hash, + } + ], + "min_term_freq": 3, + "min_doc_freq": 2, + "min_word_length": 4, + }, }, - },{ - "terms": { "feed_id": feed_ids[:2000] } - }], + {"terms": {"feed_id": feed_ids[:2000]}}, + ], } }, - 'sort': [{'date': {'order': 'desc' if order == "newest" else "asc"}}], - 'from': offset, - 'size': limit + "sort": [{"date": {"order": "desc" if order == "newest" else "asc"}}], + "from": offset, + "size": limit, } try: - results = cls.ES().search(body=body, index=cls.index_name(), doc_type=cls.doc_type()) + results = cls.ES().search(body=body, index=cls.index_name(), doc_type=cls.doc_type()) except elasticsearch.exceptions.RequestError as e: logging.debug(" ***> ~FRNo search server available for querying: %s" % e) return [] - logging.info(" ---> ~FG~SNMore like this ~FCstories~FG for: ~SB%s~SN, ~SB%s~SN results (across %s feed%s)" % - (story_hash, len(results['hits']['hits']), len(feed_ids), 's' if len(feed_ids) != 1 else '')) - + logging.info( + " ---> ~FG~SNMore like this ~FCstories~FG for: ~SB%s~SN, ~SB%s~SN results (across %s feed%s)" + % (story_hash, len(results["hits"]["hits"]), len(feed_ids), "s" if len(feed_ids) != 1 else "") + ) + try: - result_ids = [r['_id'] for r in results['hits']['hits']] + result_ids = [r["_id"] for r in results["hits"]["hits"]] except Exception as e: - logging.info(" ---> ~FRInvalid search query \"%s\": %s" % 
(query, e)) + logging.info(' ---> ~FRInvalid search query "%s": %s' % (query, e)) return [] - + return result_ids class SearchFeed: - _es_client = None name = "feeds" @@ -493,18 +494,18 @@ def ES(cls): cls._es_client = elasticsearch.Elasticsearch(settings.ELASTICSEARCH_FEED_HOST) cls.create_elasticsearch_mapping() return cls._es_client - + @classmethod def index_name(cls): # feeds-index return "%s-index" % cls.name - + @classmethod def doc_type(cls): - if settings.DOCKERBUILD or getattr(settings, 'ES_IGNORE_TYPE', True): + if settings.DOCKERBUILD or getattr(settings, "ES_IGNORE_TYPE", True): return None return "%s-type" % cls.name - + @classmethod def create_elasticsearch_mapping(cls, delete=False): if delete: @@ -518,22 +519,18 @@ def create_elasticsearch_mapping(cls, delete=False): return index_settings = { - "index" : { + "index": { "analysis": { "analyzer": { "edgengram_analyzer": { "filter": ["edgengram_analyzer"], "tokenizer": "lowercase", - "type": "custom" + "type": "custom", }, }, "filter": { - "edgengram_analyzer": { - "max_gram": "15", - "min_gram": "1", - "type": "edge_ngram" - }, - } + "edgengram_analyzer": {"max_gram": "15", "min_gram": "1", "type": "edge_ngram"}, + }, } } } @@ -544,43 +541,42 @@ def create_elasticsearch_mapping(cls, delete=False): except elasticsearch.exceptions.RequestError as e: logging.debug(" ***> ~FRCould not create search index for ~FM%s: %s" % (cls.index_name(), e)) return - except (elasticsearch.exceptions.ConnectionError, - urllib3.exceptions.NewConnectionError, - urllib3.exceptions.ConnectTimeoutError) as e: + except ( + elasticsearch.exceptions.ConnectionError, + urllib3.exceptions.NewConnectionError, + urllib3.exceptions.ConnectTimeoutError, + ) as e: logging.debug(f" ***> ~FRNo search server available for creating feed mapping: {e}") return - + mapping = { "feed_address": { - 'analyzer': 'snowball', + "analyzer": "snowball", "store": False, "term_vector": "with_positions_offsets", - "type": "text" - }, - "feed_id": { - "store": True, - "type": "text" - }, - "num_subscribers": { - "store": True, - "type": "long" + "type": "text", }, + "feed_id": {"store": True, "type": "text"}, + "num_subscribers": {"store": True, "type": "long"}, "title": { "analyzer": "snowball", "store": False, "term_vector": "with_positions_offsets", - "type": "text" + "type": "text", }, "link": { "analyzer": "snowball", "store": False, "term_vector": "with_positions_offsets", - "type": "text" - } + "type": "text", + }, } - cls.ES().indices.put_mapping(body={ - 'properties': mapping, - }, index=cls.index_name()) + cls.ES().indices.put_mapping( + body={ + "properties": mapping, + }, + index=cls.index_name(), + ) cls.ES().indices.flush(cls.index_name()) @classmethod @@ -594,8 +590,7 @@ def index(cls, feed_id, title, address, link, num_subscribers): } try: cls.ES().create(index=cls.index_name(), id=feed_id, body=doc, doc_type=cls.doc_type()) - except (elasticsearch.exceptions.ConnectionError, - urllib3.exceptions.NewConnectionError) as e: + except (elasticsearch.exceptions.ConnectionError, urllib3.exceptions.NewConnectionError) as e: logging.debug(f" ***> ~FRNo search server available for feed indexing: {e}") @classmethod @@ -615,21 +610,45 @@ def query(cls, text, max_subscribers=5): if settings.DEBUG: max_subscribers = 1 - + body = { "query": { "bool": { "should": [ - {"match": { "address": { "query": text, 'cutoff_frequency': "0.0005", 'minimum_should_match': "75%" } }}, - {"match": { "title": { "query": text, 'cutoff_frequency': "0.0005", 'minimum_should_match': "75%" } }}, - 
{"match": { "link": { "query": text, 'cutoff_frequency': "0.0005", 'minimum_should_match': "75%" } }}, + { + "match": { + "address": { + "query": text, + "cutoff_frequency": "0.0005", + "minimum_should_match": "75%", + } + } + }, + { + "match": { + "title": { + "query": text, + "cutoff_frequency": "0.0005", + "minimum_should_match": "75%", + } + } + }, + { + "match": { + "link": { + "query": text, + "cutoff_frequency": "0.0005", + "minimum_should_match": "75%", + } + } + }, ] } }, - 'sort': [{'num_subscribers': {'order': 'desc'}}], + "sort": [{"num_subscribers": {"order": "desc"}}], } try: - results = cls.ES().search(body=body, index=cls.index_name(), doc_type=cls.doc_type()) + results = cls.ES().search(body=body, index=cls.index_name(), doc_type=cls.doc_type()) except elasticsearch.exceptions.RequestError as e: logging.debug(" ***> ~FRNo search server available for querying: %s" % e) return [] @@ -651,19 +670,22 @@ def query(cls, text, max_subscribers=5): # q.add_should(pyes.query.MatchQuery('title', text, analyzer="simple", cutoff_frequency=0.0005, minimum_should_match="75%")) # q = pyes.Search(q, min_score=1) # results = cls.ES().search(query=q, size=max_subscribers, sort="num_subscribers:desc") - - logging.info("~FGSearch ~FCfeeds~FG: ~SB%s~SN, ~SB%s~SN results" % (text, len(results['hits']['hits']))) - return results['hits']['hits'] - + logging.info( + "~FGSearch ~FCfeeds~FG: ~SB%s~SN, ~SB%s~SN results" % (text, len(results["hits"]["hits"])) + ) + + return results["hits"]["hits"] + @classmethod def export_csv(cls): import djqscsv from apps.rss_feeds.models import Feed - qs = Feed.objects.filter(num_subscribers__gte=20).values('id', 'feed_title', 'feed_address', 'feed_link', 'num_subscribers') + qs = Feed.objects.filter(num_subscribers__gte=20).values( + "id", "feed_title", "feed_address", "feed_link", "num_subscribers" + ) csv = djqscsv.render_to_csv_response(qs).content - f = open('feeds.csv', 'w+') + f = open("feeds.csv", "w+") f.write(csv) f.close() - diff --git a/apps/search/tasks.py b/apps/search/tasks.py index 3ae7acf846..d56c892cf0 100644 --- a/apps/search/tasks.py +++ b/apps/search/tasks.py @@ -1,27 +1,31 @@ from newsblur_web.celeryapp import app from utils import log as logging + @app.task() def IndexSubscriptionsForSearch(user_id): from apps.search.models import MUserSearch - + user_search = MUserSearch.get_user(user_id) user_search.index_subscriptions_for_search() + @app.task() def IndexSubscriptionsChunkForSearch(feed_ids, user_id): logging.debug(" ---> Indexing: %s for %s" % (feed_ids, user_id)) from apps.search.models import MUserSearch - + user_search = MUserSearch.get_user(user_id) user_search.index_subscriptions_chunk_for_search(feed_ids) + @app.task() def IndexFeedsForSearch(feed_ids, user_id): from apps.search.models import MUserSearch - + MUserSearch.index_feeds_for_search(feed_ids, user_id) + @app.task() def FinishIndexSubscriptionsForSearch(results, user_id, start): logging.debug(" ---> Indexing finished for %s" % (user_id)) diff --git a/apps/search/urls.py b/apps/search/urls.py index e29e860a9b..fcc841b86f 100644 --- a/apps/search/urls.py +++ b/apps/search/urls.py @@ -3,5 +3,5 @@ urlpatterns = [ # url(r'^$', views.index), - url(r'^more_like_this', views.more_like_this, name='more-like-this'), + url(r"^more_like_this", views.more_like_this, name="more-like-this"), ] diff --git a/apps/search/views.py b/apps/search/views.py index e067315714..37630b5eea 100644 --- a/apps/search/views.py +++ b/apps/search/views.py @@ -5,23 +5,24 @@ from utils.view_functions import 
required_params from utils.user_functions import get_user, ajax_login_required + # @required_params('story_hash') @json.json_view def more_like_this(request): user = get_user(request) get_post = getattr(request, request.method) - order = get_post.get('order', 'newest') - page = int(get_post.get('page', 1)) - limit = int(get_post.get('limit', 10)) - offset = limit * (page-1) - story_hash = get_post.get('story_hash') - + order = get_post.get("order", "newest") + page = int(get_post.get("page", 1)) + limit = int(get_post.get("limit", 10)) + offset = limit * (page - 1) + story_hash = get_post.get("story_hash") + feed_ids = [us.feed_id for us in UserSubscription.objects.filter(user=user)] feed_ids, _ = MStory.split_story_hash(story_hash) story_ids = SearchStory.more_like_this([feed_ids], story_hash, order, offset=offset, limit=limit) - stories_db = MStory.objects( - story_hash__in=story_ids - ).order_by('-story_date' if order == "newest" else 'story_date') + stories_db = MStory.objects(story_hash__in=story_ids).order_by( + "-story_date" if order == "newest" else "story_date" + ) stories = Feed.format_stories(stories_db) return { diff --git a/apps/social/management/commands/popular_stories.py b/apps/social/management/commands/popular_stories.py index 93cb394e09..093dc049ac 100644 --- a/apps/social/management/commands/popular_stories.py +++ b/apps/social/management/commands/popular_stories.py @@ -1,7 +1,7 @@ from django.core.management.base import BaseCommand from apps.social.models import MSharedStory -class Command(BaseCommand): +class Command(BaseCommand): def handle(self, *args, **options): - MSharedStory.share_popular_stories() \ No newline at end of file + MSharedStory.share_popular_stories() diff --git a/apps/social/migrations/0001_username_unique.py b/apps/social/migrations/0001_username_unique.py index f9f45fb478..ba55f6399d 100644 --- a/apps/social/migrations/0001_username_unique.py +++ b/apps/social/migrations/0001_username_unique.py @@ -4,19 +4,17 @@ from django.conf import settings import pymongo + def remove_unique_index(apps, schema_editor): - social_profile = sp = settings.MONGODB[settings.MONGO_DB_NAME].social_profile + social_profile = sp = settings.MONGODB[settings.MONGO_DB_NAME].social_profile try: - social_profile.drop_index('username_1') + social_profile.drop_index("username_1") except pymongo.errors.OperationFailure: print(" ***> Couldn't delete username_1 index on social_profile collection. 
Already deleted?") pass -class Migration(migrations.Migration): - dependencies = [ - ] +class Migration(migrations.Migration): + dependencies = [] - operations = [ - migrations.RunPython(remove_unique_index) - ] + operations = [migrations.RunPython(remove_unique_index)] diff --git a/apps/social/models.py b/apps/social/models.py index 2b62e679b5..f4950a6a23 100644 --- a/apps/social/models.py +++ b/apps/social/models.py @@ -26,7 +26,12 @@ from django.utils.encoding import smart_bytes, smart_str from apps.reader.models import UserSubscription, RUserStory from apps.analyzer.models import MClassifierFeed, MClassifierAuthor, MClassifierTag, MClassifierTitle -from apps.analyzer.models import apply_classifier_titles, apply_classifier_feeds, apply_classifier_authors, apply_classifier_tags +from apps.analyzer.models import ( + apply_classifier_titles, + apply_classifier_feeds, + apply_classifier_authors, + apply_classifier_tags, +) from apps.rss_feeds.models import Feed, MStory from apps.rss_feeds.text_importer import TextImporter from apps.rss_feeds.page_importer import PageImporter @@ -47,36 +52,35 @@ pass RECOMMENDATIONS_LIMIT = 5 -IGNORE_IMAGE_SOURCES = [ - "http://feeds.feedburner.com" -] +IGNORE_IMAGE_SOURCES = ["http://feeds.feedburner.com"] + class MRequestInvite(mongo.Document): - email = mongo.EmailField() - request_date = mongo.DateTimeField(default=datetime.datetime.now) - invite_sent = mongo.BooleanField(default=False) + email = mongo.EmailField() + request_date = mongo.DateTimeField(default=datetime.datetime.now) + invite_sent = mongo.BooleanField(default=False) invite_sent_date = mongo.DateTimeField() meta = { - 'collection': 'social_invites', - 'allow_inheritance': False, + "collection": "social_invites", + "allow_inheritance": False, } - + def __str__(self): - return "%s%s" % (self.email, '*' if self.invite_sent else '') - + return "%s%s" % (self.email, "*" if self.invite_sent else "") + @classmethod def blast(cls): invites = cls.objects.filter(email_sent=None) - print(' ---> Found %s invites...' % invites.count()) - + print(" ---> Found %s invites..." % invites.count()) + for invite in invites: try: invite.send_email() except: - print(' ***> Could not send invite to: %s. Deleting.' % invite.username) + print(" ***> Could not send invite to: %s. Deleting." % invite.username) invite.delete() - + def send_email(self): user = User.objects.filter(username__iexact=self.username) if not user: @@ -86,84 +90,88 @@ def send_email(self): email = user.email or self.username else: user = { - 'username': self.username, - 'profile': { - 'autologin_url': '/', - } + "username": self.username, + "profile": { + "autologin_url": "/", + }, } email = self.username params = { - 'user': user, + "user": user, } - text = render_to_string('mail/email_social_beta.txt', params) - html = render_to_string('mail/email_social_beta.xhtml', params) + text = render_to_string("mail/email_social_beta.txt", params) + html = render_to_string("mail/email_social_beta.xhtml", params) subject = "Psst, you're in..." 
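        # The invite is sent as multipart/alternative: plain text as the base
        # body, with the HTML rendering attached for clients that support it.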
- msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['<%s>' % (email)]) + msg = EmailMultiAlternatives( + subject, text, from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, to=["<%s>" % (email)] + ) msg.attach_alternative(html, "text/html") msg.send() - + self.email_sent = True self.save() - + logging.debug(" ---> ~BB~FM~SBSending email for social beta: %s" % self.username) class MSocialProfile(mongo.Document): - user_id = mongo.IntField(unique=True) - username = mongo.StringField(max_length=30) - email = mongo.StringField() - bio = mongo.StringField(max_length=160) - blurblog_title = mongo.StringField(max_length=256) - custom_bgcolor = mongo.StringField(max_length=50) - custom_css = mongo.StringField() - photo_url = mongo.StringField() - photo_service = mongo.StringField() - location = mongo.StringField(max_length=40) - website = mongo.StringField(max_length=200) - bb_permalink_direct = mongo.BooleanField() - subscription_count = mongo.IntField(default=0) + user_id = mongo.IntField(unique=True) + username = mongo.StringField(max_length=30) + email = mongo.StringField() + bio = mongo.StringField(max_length=160) + blurblog_title = mongo.StringField(max_length=256) + custom_bgcolor = mongo.StringField(max_length=50) + custom_css = mongo.StringField() + photo_url = mongo.StringField() + photo_service = mongo.StringField() + location = mongo.StringField(max_length=40) + website = mongo.StringField(max_length=200) + bb_permalink_direct = mongo.BooleanField() + subscription_count = mongo.IntField(default=0) shared_stories_count = mongo.IntField(default=0) - following_count = mongo.IntField(default=0) - follower_count = mongo.IntField(default=0) - following_user_ids = mongo.ListField(mongo.IntField()) - follower_user_ids = mongo.ListField(mongo.IntField()) - unfollowed_user_ids = mongo.ListField(mongo.IntField()) + following_count = mongo.IntField(default=0) + follower_count = mongo.IntField(default=0) + following_user_ids = mongo.ListField(mongo.IntField()) + follower_user_ids = mongo.ListField(mongo.IntField()) + unfollowed_user_ids = mongo.ListField(mongo.IntField()) requested_follow_user_ids = mongo.ListField(mongo.IntField()) - muting_user_ids = mongo.ListField(mongo.IntField()) - muted_by_user_ids = mongo.ListField(mongo.IntField()) - popular_publishers = mongo.StringField() - stories_last_month = mongo.IntField(default=0) + muting_user_ids = mongo.ListField(mongo.IntField()) + muted_by_user_ids = mongo.ListField(mongo.IntField()) + popular_publishers = mongo.StringField() + stories_last_month = mongo.IntField(default=0) average_stories_per_month = mongo.IntField(default=0) - story_count_history = mongo.ListField() - story_days_history = mongo.DictField() - story_hours_history = mongo.DictField() - story_email_history = mongo.ListField() + story_count_history = mongo.ListField() + story_days_history = mongo.DictField() + story_hours_history = mongo.DictField() + story_email_history = mongo.ListField() feed_classifier_counts = mongo.DictField() - favicon_color = mongo.StringField(max_length=6) - protected = mongo.BooleanField() - private = mongo.BooleanField() - + favicon_color = mongo.StringField(max_length=6) + protected = mongo.BooleanField() + private = mongo.BooleanField() + meta = { - 'collection': 'social_profile', - 'indexes': [ - 'user_id', - 'username', - 'following_user_ids', - 'follower_user_ids', - 'unfollowed_user_ids', - 'requested_follow_user_ids', - 'muting_user_ids', - 'muted_by_user_ids', + "collection": "social_profile", 
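        # Besides user_id and username, every follow/mute relationship list
        # gets its own index: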
+ "indexes": [ + "user_id", + "username", + "following_user_ids", + "follower_user_ids", + "unfollowed_user_ids", + "requested_follow_user_ids", + "muting_user_ids", + "muted_by_user_ids", ], - 'allow_inheritance': False, + "allow_inheritance": False, } - + def __str__(self): - return "%s following %s/%s, shared %s" % (self.user, - self.following_count, self.follower_count, self.shared_stories_count) - + return "%s following %s/%s, shared %s" % ( + self.user, + self.following_count, + self.follower_count, + self.shared_stories_count, + ) + @classmethod def get_user(cls, user_id): try: @@ -176,7 +184,7 @@ def get_user(cls, user_id): profile.save() return profile - + @property def user(self): try: @@ -199,35 +207,38 @@ def save(self, *args, **kwargs): self.location = strip_tags(self.location) if self.custom_css: self.custom_css = strip_tags(self.custom_css) - + super(MSocialProfile, self).save(*args, **kwargs) if self.user_id not in self.following_user_ids: self.follow_user(self.user_id, force=True) self.count_follows() - + return self - + @property def blurblog_url(self): - return "https://%s.%s/" % ( - self.username_slug, - Site.objects.get_current().domain.replace('www.', '')) - + return "https://%s.%s/" % (self.username_slug, Site.objects.get_current().domain.replace("www.", "")) + @property def blurblog_rss(self): - return "%s%s" % (self.blurblog_url, reverse('shared-stories-rss-feed', - kwargs={'user_id': self.user_id, - 'username': self.username_slug})) + return "%s%s" % ( + self.blurblog_url, + reverse( + "shared-stories-rss-feed", kwargs={"user_id": self.user_id, "username": self.username_slug} + ), + ) def find_stories(self, query, offset=0, limit=25): stories_db = MSharedStory.objects( - Q(user_id=self.user_id) & - (Q(story_title__icontains=query) | - Q(story_author_name__icontains=query) | - Q(story_tags__icontains=query)) - ).order_by('-shared_date')[offset:offset+limit] + Q(user_id=self.user_id) + & ( + Q(story_title__icontains=query) + | Q(story_author_name__icontains=query) + | Q(story_tags__icontains=query) + ) + ).order_by("-shared_date")[offset : offset + limit] stories = Feed.format_stories(stories_db) - + return stories def recommended_users(self): @@ -235,13 +246,21 @@ def recommended_users(self): following_key = "F:%s:F" % (self.user_id) social_follow_key = "FF:%s:F" % (self.user_id) profile_user_ids = [] - + # Find potential twitter/fb friends services = MSocialServices.get_user(self.user_id) - facebook_user_ids = [u.user_id for u in - MSocialServices.objects.filter(facebook_uid__in=services.facebook_friend_ids).only('user_id')] - twitter_user_ids = [u.user_id for u in - MSocialServices.objects.filter(twitter_uid__in=services.twitter_friend_ids).only('user_id')] + facebook_user_ids = [ + u.user_id + for u in MSocialServices.objects.filter(facebook_uid__in=services.facebook_friend_ids).only( + "user_id" + ) + ] + twitter_user_ids = [ + u.user_id + for u in MSocialServices.objects.filter(twitter_uid__in=services.twitter_friend_ids).only( + "user_id" + ) + ] social_user_ids = facebook_user_ids + twitter_user_ids # Find users not currently followed by this user r.delete(social_follow_key) @@ -251,10 +270,10 @@ def recommended_users(self): nonfriend_user_ids = r.sdiff(social_follow_key, following_key) profile_user_ids = [int(f) for f in nonfriend_user_ids] r.delete(social_follow_key) - + # Not enough? Grab popular users. 
if len(nonfriend_user_ids) < RECOMMENDATIONS_LIMIT: - homepage_user = User.objects.get(username='popular') + homepage_user = User.objects.get(username="popular") suggested_users_list = r.sdiff("F:%s:F" % homepage_user.pk, following_key) suggested_users_list = [int(f) for f in suggested_users_list] suggested_user_ids = [] @@ -262,32 +281,40 @@ def recommended_users(self): for slot in range(slots_left): suggested_user_ids.append(random.choice(suggested_users_list)) profile_user_ids.extend(suggested_user_ids) - + # Sort by shared story count - profiles = MSocialProfile.profiles(profile_user_ids).order_by('-shared_stories_count')[:RECOMMENDATIONS_LIMIT] + profiles = MSocialProfile.profiles(profile_user_ids).order_by("-shared_stories_count")[ + :RECOMMENDATIONS_LIMIT + ] return profiles - + @property def username_slug(self): return slugify(self.user.username if self.user else "[deleted]") - + def count_stories(self): # Popular Publishers self.save_popular_publishers() - + def save_popular_publishers(self, feed_publishers=None): if not feed_publishers: publishers = defaultdict(int) - for story in MSharedStory.objects(user_id=self.user_id).only('story_feed_id')[:500]: + for story in MSharedStory.objects(user_id=self.user_id).only("story_feed_id")[:500]: publishers[story.story_feed_id] += 1 - feed_titles = dict((f.id, f.feed_title) - for f in Feed.objects.filter(pk__in=list(publishers.keys())).only('id', 'feed_title')) - feed_publishers = sorted([{'id': k, 'feed_title': feed_titles[k], 'story_count': v} - for k, v in list(publishers.items()) - if k in feed_titles], - key=lambda f: f['story_count'], - reverse=True)[:20] + feed_titles = dict( + (f.id, f.feed_title) + for f in Feed.objects.filter(pk__in=list(publishers.keys())).only("id", "feed_title") + ) + feed_publishers = sorted( + [ + {"id": k, "feed_title": feed_titles[k], "story_count": v} + for k, v in list(publishers.items()) + if k in feed_titles + ], + key=lambda f: f["story_count"], + reverse=True, + )[:20] popular_publishers = json.encode(feed_publishers) if len(popular_publishers) < 1023: @@ -297,12 +324,12 @@ def save_popular_publishers(self, feed_publishers=None): if len(popular_publishers) > 1: self.save_popular_publishers(feed_publishers=feed_publishers[:-1]) - + @classmethod def profile(cls, user_id, include_follows=True): profile = cls.get_user(user_id) return profile.canonical(include_follows=True) - + @classmethod def profiles(cls, user_ids): profiles = cls.objects.filter(user_id__in=user_ids) @@ -313,148 +340,180 @@ def profile_feeds(cls, user_ids): profiles = cls.objects.filter(user_id__in=user_ids) profiles = dict((p.user_id, p.feed()) for p in profiles) return profiles - + @classmethod def sync_all_redis(cls): for profile in cls.objects.all(): profile.sync_redis(force=True) - + def sync_redis(self, force=False): self.following_user_ids = list(set(self.following_user_ids)) self.save() - + for user_id in self.following_user_ids: self.follow_user(user_id, force=force) - + self.follow_user(self.user_id, force=force) - + @property def title(self): - return self.blurblog_title if self.blurblog_title else (self.user.username if self.user else "[deleted]") + "'s blurblog" - + return ( + self.blurblog_title + if self.blurblog_title + else (self.user.username if self.user else "[deleted]") + "'s blurblog" + ) + def feed(self): params = self.canonical(compact=True) - params.update({ - 'feed_title': self.title, - 'page_url': reverse('load-social-page', kwargs={'user_id': self.user_id, 'username': self.username_slug}), - 
'shared_stories_count': self.shared_stories_count, - }) + params.update( + { + "feed_title": self.title, + "page_url": reverse( + "load-social-page", kwargs={"user_id": self.user_id, "username": self.username_slug} + ), + "shared_stories_count": self.shared_stories_count, + } + ) return params - + def page(self): params = self.canonical(include_follows=True) - params.update({ - 'feed_title': self.title, - 'custom_css': self.custom_css, - }) + params.update( + { + "feed_title": self.title, + "custom_css": self.custom_css, + } + ) return params - + @property def profile_photo_url(self): if self.photo_url: return self.photo_url - return settings.MEDIA_URL + 'img/reader/default_profile_photo.png' - + return settings.MEDIA_URL + "img/reader/default_profile_photo.png" + @property def large_photo_url(self): photo_url = self.email_photo_url - if 'graph.facebook.com' in photo_url: - return photo_url + '?type=large' - elif 'twimg' in photo_url: - return photo_url.replace('_normal', '') - elif '/avatars/' in photo_url: - return photo_url.replace('thumbnail_', 'large_') + if "graph.facebook.com" in photo_url: + return photo_url + "?type=large" + elif "twimg" in photo_url: + return photo_url.replace("_normal", "") + elif "/avatars/" in photo_url: + return photo_url.replace("thumbnail_", "large_") return photo_url - + @property def email_photo_url(self): if self.photo_url: - if self.photo_url.startswith('//'): - self.photo_url = 'https:' + self.photo_url + if self.photo_url.startswith("//"): + self.photo_url = "https:" + self.photo_url return self.photo_url domain = Site.objects.get_current().domain - return 'https://' + domain + settings.MEDIA_URL + 'img/reader/default_profile_photo.png' - - def canonical(self, compact=False, include_follows=False, common_follows_with_user=None, - include_settings=False, include_following_user=None): + return "https://" + domain + settings.MEDIA_URL + "img/reader/default_profile_photo.png" + + def canonical( + self, + compact=False, + include_follows=False, + common_follows_with_user=None, + include_settings=False, + include_following_user=None, + ): domain = Site.objects.get_current().domain params = { - 'id': 'social:%s' % self.user_id, - 'user_id': self.user_id, - 'username': self.user.username if self.user else "[deleted]", - 'photo_url': self.email_photo_url, - 'large_photo_url': self.large_photo_url, - 'location': self.location, - 'num_subscribers': self.follower_count, - 'feed_title': self.title, - 'feed_address': "http://%s%s" % (domain, reverse('shared-stories-rss-feed', - kwargs={'user_id': self.user_id, 'username': self.username_slug})), - 'feed_link': self.blurblog_url, - 'protected': self.protected, - 'private': self.private, - 'active': True, + "id": "social:%s" % self.user_id, + "user_id": self.user_id, + "username": self.user.username if self.user else "[deleted]", + "photo_url": self.email_photo_url, + "large_photo_url": self.large_photo_url, + "location": self.location, + "num_subscribers": self.follower_count, + "feed_title": self.title, + "feed_address": "http://%s%s" + % ( + domain, + reverse( + "shared-stories-rss-feed", + kwargs={"user_id": self.user_id, "username": self.username_slug}, + ), + ), + "feed_link": self.blurblog_url, + "protected": self.protected, + "private": self.private, + "active": True, } if not compact: - params.update({ - 'large_photo_url': self.large_photo_url, - 'bio': self.bio, - 'website': self.website, - 'shared_stories_count': self.shared_stories_count, - 'following_count': self.following_count, - 'follower_count': 
self.follower_count, - 'popular_publishers': json.decode(self.popular_publishers), - 'stories_last_month': self.stories_last_month, - 'average_stories_per_month': self.average_stories_per_month, - }) + params.update( + { + "large_photo_url": self.large_photo_url, + "bio": self.bio, + "website": self.website, + "shared_stories_count": self.shared_stories_count, + "following_count": self.following_count, + "follower_count": self.follower_count, + "popular_publishers": json.decode(self.popular_publishers), + "stories_last_month": self.stories_last_month, + "average_stories_per_month": self.average_stories_per_month, + } + ) if include_settings: - params.update({ - 'custom_css': self.custom_css, - 'custom_bgcolor': self.custom_bgcolor, - 'bb_permalink_direct': self.bb_permalink_direct, - }) + params.update( + { + "custom_css": self.custom_css, + "custom_bgcolor": self.custom_bgcolor, + "bb_permalink_direct": self.bb_permalink_direct, + } + ) if include_follows: - params.update({ - 'photo_service': self.photo_service, - 'following_user_ids': self.following_user_ids_without_self[:48], - 'follower_user_ids': self.follower_user_ids_without_self[:48], - }) + params.update( + { + "photo_service": self.photo_service, + "following_user_ids": self.following_user_ids_without_self[:48], + "follower_user_ids": self.follower_user_ids_without_self[:48], + } + ) if common_follows_with_user: FOLLOWERS_LIMIT = 128 with_user = MSocialProfile.get_user(common_follows_with_user) - followers_youknow, followers_everybody = with_user.common_follows(self.user_id, direction='followers') - following_youknow, following_everybody = with_user.common_follows(self.user_id, direction='following') - params['followers_youknow'] = followers_youknow[:FOLLOWERS_LIMIT] - params['followers_everybody'] = followers_everybody[:FOLLOWERS_LIMIT] - params['following_youknow'] = following_youknow[:FOLLOWERS_LIMIT] - params['following_everybody'] = following_everybody[:FOLLOWERS_LIMIT] - params['requested_follow'] = common_follows_with_user in self.requested_follow_user_ids + followers_youknow, followers_everybody = with_user.common_follows( + self.user_id, direction="followers" + ) + following_youknow, following_everybody = with_user.common_follows( + self.user_id, direction="following" + ) + params["followers_youknow"] = followers_youknow[:FOLLOWERS_LIMIT] + params["followers_everybody"] = followers_everybody[:FOLLOWERS_LIMIT] + params["following_youknow"] = following_youknow[:FOLLOWERS_LIMIT] + params["following_everybody"] = following_everybody[:FOLLOWERS_LIMIT] + params["requested_follow"] = common_follows_with_user in self.requested_follow_user_ids if include_following_user or common_follows_with_user: if not include_following_user: include_following_user = common_follows_with_user if include_following_user != self.user_id: - params['followed_by_you'] = bool(self.is_followed_by_user(include_following_user)) - params['following_you'] = self.is_following_user(include_following_user) - params['muted'] = include_following_user in self.muted_by_user_ids + params["followed_by_you"] = bool(self.is_followed_by_user(include_following_user)) + params["following_you"] = self.is_following_user(include_following_user) + params["muted"] = include_following_user in self.muted_by_user_ids return params - + @property def following_user_ids_without_self(self): if self.user_id in self.following_user_ids: return [u for u in self.following_user_ids if u != self.user_id] return self.following_user_ids - + @property def follower_user_ids_without_self(self): 
if self.user_id in self.follower_user_ids: return [u for u in self.follower_user_ids if u != self.user_id] return self.follower_user_ids - + def import_user_fields(self): user = User.objects.get(pk=self.user_id) self.username = user.username self.email = user.email - + def count_follows(self, skip_save=False): self.subscription_count = UserSubscription.objects.filter(user__pk=self.user_id).count() self.shared_stories_count = MSharedStory.objects.filter(user_id=self.user_id).count() @@ -462,31 +521,31 @@ def count_follows(self, skip_save=False): self.follower_count = len(self.follower_user_ids_without_self) if not skip_save: self.save() - + def follow_user(self, user_id, check_unfollowed=False, force=False): r = redis.Redis(connection_pool=settings.REDIS_POOL) - + if check_unfollowed and user_id in self.unfollowed_user_ids: return - + if self.user_id == user_id: followee = self else: followee = MSocialProfile.get_user(user_id) - + logging.debug(" ---> ~FB~SB%s~SN (%s) following %s" % (self.user.username, self.user_id, user_id)) - + if not followee.protected or force: if user_id not in self.following_user_ids: self.following_user_ids.append(user_id) elif not force: return - + if user_id in self.unfollowed_user_ids: self.unfollowed_user_ids.remove(user_id) self.count_follows() self.save() - + if followee.protected and user_id != self.user_id and not force: if self.user_id not in followee.requested_follow_user_ids: followee.requested_follow_user_ids.append(self.user_id) @@ -498,11 +557,13 @@ def follow_user(self, user_id, check_unfollowed=False, force=False): if followee.protected and user_id != self.user_id and not force: from apps.social.tasks import EmailFollowRequest - EmailFollowRequest.apply_async(kwargs=dict(follower_user_id=self.user_id, - followee_user_id=user_id), - countdown=settings.SECONDS_TO_DELAY_CELERY_EMAILS) + + EmailFollowRequest.apply_async( + kwargs=dict(follower_user_id=self.user_id, followee_user_id=user_id), + countdown=settings.SECONDS_TO_DELAY_CELERY_EMAILS, + ) return - + following_key = "F:%s:F" % (self.user_id) r.sadd(following_key, user_id) follower_key = "F:%s:f" % (user_id) @@ -511,7 +572,7 @@ def follow_user(self, user_id, check_unfollowed=False, force=False): if user_id != self.user_id: MInteraction.new_follow(follower_user_id=self.user_id, followee_user_id=user_id) MActivity.new_follow(follower_user_id=self.user_id, followee_user_id=user_id) - + params = dict(user_id=self.user_id, subscription_user_id=user_id) try: socialsub = MSocialSubscription.objects.get(**params) @@ -519,31 +580,33 @@ def follow_user(self, user_id, check_unfollowed=False, force=False): socialsub = MSocialSubscription.objects.create(**params) socialsub.needs_unread_recalc = True socialsub.save() - + MFollowRequest.remove(self.user_id, user_id) - + if not force: from apps.social.tasks import EmailNewFollower - EmailNewFollower.apply_async(kwargs=dict(follower_user_id=self.user_id, - followee_user_id=user_id), - countdown=settings.SECONDS_TO_DELAY_CELERY_EMAILS) - + + EmailNewFollower.apply_async( + kwargs=dict(follower_user_id=self.user_id, followee_user_id=user_id), + countdown=settings.SECONDS_TO_DELAY_CELERY_EMAILS, + ) + return socialsub - + def is_following_user(self, user_id): # XXX TODO: Outsource to redis return user_id in self.following_user_ids - + def is_followed_by_user(self, user_id): # XXX TODO: Outsource to redis return user_id in self.follower_user_ids - + def unfollow_user(self, user_id): r = redis.Redis(connection_pool=settings.REDIS_POOL) - + if not isinstance(user_id, 
int): user_id = int(user_id) - + if user_id == self.user_id: # Only unfollow other people, not yourself. return @@ -554,7 +617,7 @@ def unfollow_user(self, user_id): self.unfollowed_user_ids.append(user_id) self.count_follows() self.save() - + followee = MSocialProfile.get_user(user_id) if self.user_id in followee.follower_user_ids: followee.follower_user_ids.remove(self.user_id) @@ -565,34 +628,34 @@ def unfollow_user(self, user_id): followee.count_follows() followee.save() MFollowRequest.remove(self.user_id, user_id) - + following_key = "F:%s:F" % (self.user_id) r.srem(following_key, user_id) follower_key = "F:%s:f" % (user_id) r.srem(follower_key, self.user_id) - + try: MSocialSubscription.objects.get(user_id=self.user_id, subscription_user_id=user_id).delete() except MSocialSubscription.DoesNotExist: return False - - def common_follows(self, user_id, direction='followers'): + + def common_follows(self, user_id, direction="followers"): r = redis.Redis(connection_pool=settings.REDIS_POOL) - - my_followers = "F:%s:%s" % (self.user_id, 'F' if direction == 'followers' else 'F') - their_followers = "F:%s:%s" % (user_id, 'f' if direction == 'followers' else 'F') - follows_inter = r.sinter(their_followers, my_followers) - follows_diff = r.sdiff(their_followers, my_followers) - follows_inter = [int(f) for f in follows_inter] - follows_diff = [int(f) for f in follows_diff] - + + my_followers = "F:%s:%s" % (self.user_id, "F" if direction == "followers" else "F") + their_followers = "F:%s:%s" % (user_id, "f" if direction == "followers" else "F") + follows_inter = r.sinter(their_followers, my_followers) + follows_diff = r.sdiff(their_followers, my_followers) + follows_inter = [int(f) for f in follows_inter] + follows_diff = [int(f) for f in follows_diff] + if user_id in follows_inter: follows_inter.remove(user_id) if user_id in follows_diff: follows_diff.remove(user_id) - + return follows_inter, follows_diff - + def send_email_for_new_follower(self, follower_user_id): user = User.objects.get(pk=self.user_id) if follower_user_id not in self.follower_user_ids: @@ -606,45 +669,49 @@ def send_email_for_new_follower(self, follower_user_id): return if self.user_id == follower_user_id: return - - emails_sent = MSentEmail.objects.filter(receiver_user_id=user.pk, - sending_user_id=follower_user_id, - email_type='new_follower') + + emails_sent = MSentEmail.objects.filter( + receiver_user_id=user.pk, sending_user_id=follower_user_id, email_type="new_follower" + ) day_ago = datetime.datetime.now() - datetime.timedelta(days=1) for email in emails_sent: if email.date_sent > day_ago: logging.user(user, "~SK~FMNot sending new follower email, already sent before. 
NBD.") return - + follower_profile = MSocialProfile.get_user(follower_user_id) - common_followers, _ = self.common_follows(follower_user_id, direction='followers') - common_followings, _ = self.common_follows(follower_user_id, direction='following') + common_followers, _ = self.common_follows(follower_user_id, direction="followers") + common_followings, _ = self.common_follows(follower_user_id, direction="following") if self.user_id in common_followers: common_followers.remove(self.user_id) if self.user_id in common_followings: common_followings.remove(self.user_id) common_followers = MSocialProfile.profiles(common_followers) common_followings = MSocialProfile.profiles(common_followings) - + data = { - 'user': user, - 'follower_profile': follower_profile, - 'common_followers': common_followers, - 'common_followings': common_followings, + "user": user, + "follower_profile": follower_profile, + "common_followers": common_followers, + "common_followings": common_followings, } - - text = render_to_string('mail/email_new_follower.txt', data) - html = render_to_string('mail/email_new_follower.xhtml', data) + + text = render_to_string("mail/email_new_follower.txt", data) + html = render_to_string("mail/email_new_follower.xhtml", data) subject = "%s is now following your Blurblog on NewsBlur!" % follower_profile.user.username - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user.username, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user.username, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - - MSentEmail.record(receiver_user_id=user.pk, sending_user_id=follower_user_id, - email_type='new_follower') - + + MSentEmail.record( + receiver_user_id=user.pk, sending_user_id=follower_user_id, email_type="new_follower" + ) + logging.user(user, "~BB~FM~SBSending email for new follower: %s" % follower_profile.user.username) def send_email_for_follow_request(self, follower_user_id): @@ -660,57 +727,61 @@ def send_email_for_follow_request(self, follower_user_id): return if self.user_id == follower_user_id: return - - emails_sent = MSentEmail.objects.filter(receiver_user_id=user.pk, - sending_user_id=follower_user_id, - email_type='follow_request') + + emails_sent = MSentEmail.objects.filter( + receiver_user_id=user.pk, sending_user_id=follower_user_id, email_type="follow_request" + ) day_ago = datetime.datetime.now() - datetime.timedelta(days=1) for email in emails_sent: if email.date_sent > day_ago: logging.user(user, "~SK~FMNot sending follow request email, already sent before. 
NBD.") return - + follower_profile = MSocialProfile.get_user(follower_user_id) - common_followers, _ = self.common_follows(follower_user_id, direction='followers') - common_followings, _ = self.common_follows(follower_user_id, direction='following') + common_followers, _ = self.common_follows(follower_user_id, direction="followers") + common_followings, _ = self.common_follows(follower_user_id, direction="following") if self.user_id in common_followers: common_followers.remove(self.user_id) if self.user_id in common_followings: common_followings.remove(self.user_id) common_followers = MSocialProfile.profiles(common_followers) common_followings = MSocialProfile.profiles(common_followings) - + data = { - 'user': user, - 'follower_profile': follower_profile, - 'common_followers': common_followers, - 'common_followings': common_followings, + "user": user, + "follower_profile": follower_profile, + "common_followers": common_followers, + "common_followings": common_followings, } - - text = render_to_string('mail/email_follow_request.txt', data) - html = render_to_string('mail/email_follow_request.xhtml', data) + + text = render_to_string("mail/email_follow_request.txt", data) + html = render_to_string("mail/email_follow_request.xhtml", data) subject = "%s has requested to follow your Blurblog on NewsBlur" % follower_profile.user.username - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user.username, user.email)]) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user.username, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - - MSentEmail.record(receiver_user_id=user.pk, sending_user_id=follower_user_id, - email_type='follow_request') - + + MSentEmail.record( + receiver_user_id=user.pk, sending_user_id=follower_user_id, email_type="follow_request" + ) + logging.user(user, "~BB~FM~SBSending email for follow request: %s" % follower_profile.user.username) - + def mute_user(self, muting_user_id): if muting_user_id not in self.muting_user_ids: self.muting_user_ids.append(muting_user_id) self.save() - + muting_user_profile = MSocialProfile.get_user(muting_user_id) if self.user_id not in muting_user_profile.muted_by_user_ids: muting_user_profile.muted_by_user_ids.append(self.user_id) muting_user_profile.save() - + def unmute_user(self, muting_user_id): if muting_user_id in self.muting_user_ids: self.muting_user_ids.remove(muting_user_id) @@ -720,11 +791,11 @@ def unmute_user(self, muting_user_id): if self.user_id in muting_user_profile.muted_by_user_ids: muting_user_profile.muted_by_user_ids.remove(self.user_id) muting_user_profile.save() - + def save_feed_story_history_statistics(self): """ Fills in missing months between earlier occurances and now. - + Save format: [('YYYY-MM, #), ...] 
Example output: [(2010-12, 123), (2011-01, 146)] """ @@ -750,23 +821,23 @@ def save_feed_story_history_statistics(self): dates = defaultdict(int) hours = defaultdict(int) days = defaultdict(int) - results = MSharedStory.objects(user_id=self.user_id).map_reduce(map_f, reduce_f, output='inline') + results = MSharedStory.objects(user_id=self.user_id).map_reduce(map_f, reduce_f, output="inline") for result in results: - dates[result.value['month']] += 1 - hours[str(int(result.value['hour']))] += 1 - days[str(int(result.value['day']))] += 1 - year = int(re.findall(r"(\d{4})-\d{1,2}", result.value['month'])[0]) + dates[result.value["month"]] += 1 + hours[str(int(result.value["hour"]))] += 1 + days[str(int(result.value["day"]))] += 1 + year = int(re.findall(r"(\d{4})-\d{1,2}", result.value["month"])[0]) if year < min_year: min_year = year - - # Assemble a list with 0's filled in for missing months, + + # Assemble a list with 0's filled in for missing months, # trimming left and right 0's. months = [] start = False - for year in range(min_year, now.year+1): - for month in range(1, 12+1): + for year in range(min_year, now.year + 1): + for month in range(1, 12 + 1): if datetime.datetime(year, month, 1) < now: - key = '%s-%s' % (year, month) + key = "%s-%s" % (year, month) if dates.get(key) or start: start = True months.append((key, dates.get(key, 0))) @@ -778,9 +849,8 @@ def save_feed_story_history_statistics(self): self.story_hours_history = hours self.average_stories_per_month = total / max(1, month_count) self.save() - + def save_classifier_counts(self): - def calculate_scores(cls, facet): map_f = """ function() { @@ -789,7 +859,9 @@ def calculate_scores(cls, facet): neg: this.score<0 ? Math.abs(this.score) : 0 }); } - """ % (facet) + """ % ( + facet + ) reduce_f = """ function(key, values) { var result = {pos: 0, neg: 0}; @@ -801,40 +873,42 @@ def calculate_scores(cls, facet): } """ scores = [] - res = cls.objects(social_user_id=self.user_id).map_reduce(map_f, reduce_f, output='inline') + res = cls.objects(social_user_id=self.user_id).map_reduce(map_f, reduce_f, output="inline") for r in res: - facet_values = dict([(k, int(v)) for k,v in list(r.value.items())]) + facet_values = dict([(k, int(v)) for k, v in list(r.value.items())]) facet_values[facet] = r.key scores.append(facet_values) - scores = sorted(scores, key=lambda v: v['neg'] - v['pos']) + scores = sorted(scores, key=lambda v: v["neg"] - v["pos"]) return scores - + scores = {} - for cls, facet in [(MClassifierTitle, 'title'), - (MClassifierAuthor, 'author'), - (MClassifierTag, 'tag'), - (MClassifierFeed, 'feed_id')]: + for cls, facet in [ + (MClassifierTitle, "title"), + (MClassifierAuthor, "author"), + (MClassifierTag, "tag"), + (MClassifierFeed, "feed_id"), + ]: scores[facet] = calculate_scores(cls, facet) - if facet == 'feed_id' and scores[facet]: - scores['feed'] = scores[facet] - del scores['feed_id'] + if facet == "feed_id" and scores[facet]: + scores["feed"] = scores[facet] + del scores["feed_id"] elif not scores[facet]: del scores[facet] - + if scores: self.feed_classifier_counts = scores self.save() - + def save_sent_email(self, max_quota=20): if not self.story_email_history: self.story_email_history = [] - + self.story_email_history.insert(0, datetime.datetime.now()) self.story_email_history = self.story_email_history[:max_quota] - + self.save() - + def over_story_email_quota(self, quota=1, hours=24): counted = 0 day_ago = datetime.datetime.now() - datetime.timedelta(hours=hours) @@ -846,17 +920,18 @@ def 
over_story_email_quota(self, quota=1, hours=24): for sent_date in sent_emails: if sent_date > day_ago: counted += 1 - + if counted >= quota: return True - + return False - + + class MSocialSubscription(mongo.Document): UNREAD_CUTOFF = datetime.datetime.utcnow() - datetime.timedelta(days=settings.DAYS_OF_UNREAD) user_id = mongo.IntField() - subscription_user_id = mongo.IntField(unique_with='user_id') + subscription_user_id = mongo.IntField(unique_with="user_id") follow_date = mongo.DateTimeField(default=datetime.datetime.utcnow()) last_read_date = mongo.DateTimeField(default=UNREAD_CUTOFF) mark_read_date = mongo.DateTimeField(default=UNREAD_CUTOFF) @@ -869,24 +944,31 @@ class MSocialSubscription(mongo.Document): feed_opens = mongo.IntField(default=0) is_trained = mongo.BooleanField(default=False) active = mongo.BooleanField(default=True) - + meta = { - 'collection': 'social_subscription', - 'indexes': [('user_id', 'subscription_user_id')], - 'allow_inheritance': False, - 'strict': False, + "collection": "social_subscription", + "indexes": [("user_id", "subscription_user_id")], + "allow_inheritance": False, + "strict": False, } def __str__(self): user = User.objects.get(pk=self.user_id) subscription_user = User.objects.get(pk=self.subscription_user_id) return "Socialsub %s:%s" % (user, subscription_user) - + @classmethod - def feeds(cls, user_id=None, subscription_user_id=None, calculate_all_scores=False, - update_counts=False, *args, **kwargs): + def feeds( + cls, + user_id=None, + subscription_user_id=None, + calculate_all_scores=False, + update_counts=False, + *args, + **kwargs, + ): params = { - 'user_id': user_id, + "user_id": user_id, } if subscription_user_id: params["subscription_user_id"] = subscription_user_id @@ -895,125 +977,139 @@ def feeds(cls, user_id=None, subscription_user_id=None, calculate_all_scores=Fal social_feeds = [] if social_subs: if calculate_all_scores: - for s in social_subs: s.calculate_feed_scores() + for s in social_subs: + s.calculate_feed_scores() # Fetch user profiles of subscriptions social_user_ids = [sub.subscription_user_id for sub in social_subs] social_profiles = MSocialProfile.profile_feeds(social_user_ids) for social_sub in social_subs: user_id = social_sub.subscription_user_id - if social_profiles[user_id]['shared_stories_count'] <= 0: + if social_profiles[user_id]["shared_stories_count"] <= 0: continue if update_counts and social_sub.needs_unread_recalc: social_sub.calculate_feed_scores() - + # Combine subscription read counts with feed/user info feed = dict(list(social_sub.canonical().items()) + list(social_profiles[user_id].items())) social_feeds.append(feed) return social_feeds - + @classmethod def feeds_with_updated_counts(cls, user, social_feed_ids=None): feeds = {} - + # Get social subscriptions for user user_subs = cls.objects.filter(user_id=user.pk) if social_feed_ids: - social_user_ids = [int(f.replace('social:', '')) for f in social_feed_ids] + social_user_ids = [int(f.replace("social:", "")) for f in social_feed_ids] user_subs = user_subs.filter(subscription_user_id__in=social_user_ids) profiles = MSocialProfile.objects.filter(user_id__in=social_user_ids) profiles = dict((p.user_id, p) for p in profiles) - + for i, sub in enumerate(user_subs): # Count unreads if subscription is stale. 
- if (sub.needs_unread_recalc or - (sub.unread_count_updated and - sub.unread_count_updated < user.profile.unread_cutoff) or - (sub.oldest_unread_story_date and - sub.oldest_unread_story_date < user.profile.unread_cutoff)): + if ( + sub.needs_unread_recalc + or (sub.unread_count_updated and sub.unread_count_updated < user.profile.unread_cutoff) + or ( + sub.oldest_unread_story_date and sub.oldest_unread_story_date < user.profile.unread_cutoff + ) + ): sub = sub.calculate_feed_scores(force=True, silent=True) feed_id = "social:%s" % sub.subscription_user_id feeds[feed_id] = { - 'ps': sub.unread_count_positive, - 'nt': sub.unread_count_neutral, - 'ng': sub.unread_count_negative, - 'id': feed_id, + "ps": sub.unread_count_positive, + "nt": sub.unread_count_neutral, + "ng": sub.unread_count_negative, + "id": feed_id, } if social_feed_ids and sub.subscription_user_id in profiles: - feeds[feed_id]['shared_stories_count'] = profiles[sub.subscription_user_id].shared_stories_count + feeds[feed_id]["shared_stories_count"] = profiles[ + sub.subscription_user_id + ].shared_stories_count return feeds - + def canonical(self): return { - 'user_id': self.user_id, - 'active': self.active, - 'subscription_user_id': self.subscription_user_id, - 'nt': self.unread_count_neutral, - 'ps': self.unread_count_positive, - 'ng': self.unread_count_negative, - 'is_trained': self.is_trained, - 'feed_opens': self.feed_opens, + "user_id": self.user_id, + "active": self.active, + "subscription_user_id": self.subscription_user_id, + "nt": self.unread_count_neutral, + "ps": self.unread_count_positive, + "ng": self.unread_count_negative, + "is_trained": self.is_trained, + "feed_opens": self.feed_opens, } @classmethod def subs_for_users(cls, user_id, subscription_user_ids=None, read_filter="unread"): socialsubs = cls.objects if read_filter == "unread": - socialsubs = socialsubs.filter(Q(unread_count_neutral__gt=0) | - Q(unread_count_positive__gt=0)) + socialsubs = socialsubs.filter(Q(unread_count_neutral__gt=0) | Q(unread_count_positive__gt=0)) if not subscription_user_ids: - socialsubs = socialsubs.filter(user_id=user_id)\ - .only('subscription_user_id', 'mark_read_date', 'is_trained') + socialsubs = socialsubs.filter(user_id=user_id).only( + "subscription_user_id", "mark_read_date", "is_trained" + ) else: - socialsubs = socialsubs.filter(user_id=user_id, - subscription_user_id__in=subscription_user_ids)\ - .only('subscription_user_id', 'mark_read_date', 'is_trained') - + socialsubs = socialsubs.filter( + user_id=user_id, subscription_user_id__in=subscription_user_ids + ).only("subscription_user_id", "mark_read_date", "is_trained") + return socialsubs @classmethod - def story_hashes(cls, user_id, relative_user_id, subscription_user_ids=None, socialsubs=None, - read_filter="unread", order="newest", - include_timestamps=False, group_by_user=True, cutoff_date=None): + def story_hashes( + cls, + user_id, + relative_user_id, + subscription_user_ids=None, + socialsubs=None, + read_filter="unread", + order="newest", + include_timestamps=False, + group_by_user=True, + cutoff_date=None, + ): r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) pipeline = r.pipeline() story_hashes = {} if group_by_user else [] if not socialsubs: - socialsubs = cls.subs_for_users(relative_user_id, - subscription_user_ids=subscription_user_ids, - read_filter=read_filter) + socialsubs = cls.subs_for_users( + relative_user_id, subscription_user_ids=subscription_user_ids, read_filter=read_filter + ) subscription_user_ids = [sub.subscription_user_id 
for sub in socialsubs] if not subscription_user_ids: return story_hashes - - current_time = int(time.time() + 60*60*24) + + current_time = int(time.time() + 60 * 60 * 24) if not cutoff_date: cutoff_date = datetime.datetime.now() - datetime.timedelta(days=settings.DAYS_OF_STORY_HASHES) - unread_timestamp = int(time.mktime(cutoff_date.timetuple()))-1000 + unread_timestamp = int(time.mktime(cutoff_date.timetuple())) - 1000 feed_counter = 0 read_dates = dict() for us in socialsubs: - read_dates[us.subscription_user_id] = int(max(us.mark_read_date, cutoff_date).strftime('%s')) + read_dates[us.subscription_user_id] = int(max(us.mark_read_date, cutoff_date).strftime("%s")) for sub_user_id_group in chunks(subscription_user_ids, 20): pipeline = r.pipeline() for sub_user_id in sub_user_id_group: - stories_key = 'B:%s' % (sub_user_id) - sorted_stories_key = 'zB:%s' % (sub_user_id) - read_stories_key = 'RS:%s' % (user_id) - read_social_stories_key = 'RS:%s:B:%s' % (user_id, sub_user_id) - unread_stories_key = 'UB:%s:%s' % (user_id, sub_user_id) - sorted_stories_key = 'zB:%s' % (sub_user_id) - unread_ranked_stories_key = 'zUB:%s:%s' % (user_id, sub_user_id) + stories_key = "B:%s" % (sub_user_id) + sorted_stories_key = "zB:%s" % (sub_user_id) + read_stories_key = "RS:%s" % (user_id) + read_social_stories_key = "RS:%s:B:%s" % (user_id, sub_user_id) + unread_stories_key = "UB:%s:%s" % (user_id, sub_user_id) + sorted_stories_key = "zB:%s" % (sub_user_id) + unread_ranked_stories_key = "zUB:%s:%s" % (user_id, sub_user_id) expire_unread_stories_key = False - + max_score = current_time - if read_filter == 'unread': + if read_filter == "unread": # +1 for the intersection b/w zF and F, which carries an implicit score of 1. min_score = read_dates[sub_user_id] + 1 pipeline.sdiffstore(unread_stories_key, stories_key, read_stories_key) @@ -1023,67 +1119,78 @@ def story_hashes(cls, user_id, relative_user_id, subscription_user_ids=None, soc min_score = unread_timestamp unread_stories_key = stories_key - if order == 'oldest': + if order == "oldest": byscorefunc = pipeline.zrangebyscore else: byscorefunc = pipeline.zrevrangebyscore min_score, max_score = max_score, min_score - + pipeline.zinterstore(unread_ranked_stories_key, [sorted_stories_key, unread_stories_key]) byscorefunc(unread_ranked_stories_key, min_score, max_score, withscores=include_timestamps) pipeline.delete(unread_ranked_stories_key) if expire_unread_stories_key: pipeline.delete(unread_stories_key) - results = pipeline.execute() - + for hashes in results: - if not isinstance(hashes, list): continue + if not isinstance(hashes, list): + continue if group_by_user: story_hashes[subscription_user_ids[feed_counter]] = hashes feed_counter += 1 else: story_hashes.extend(hashes) - + return story_hashes - - def get_stories(self, offset=0, limit=6, order='newest', read_filter='all', - withscores=False, hashes_only=False, cutoff_date=None, - mark_read_complement=False): + + def get_stories( + self, + offset=0, + limit=6, + order="newest", + read_filter="all", + withscores=False, + hashes_only=False, + cutoff_date=None, + mark_read_complement=False, + ): r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) ignore_user_stories = False - - stories_key = 'B:%s' % (self.subscription_user_id) - read_stories_key = 'RS:%s' % (self.user_id) - read_social_stories_key = 'RS:%s:B:%s' % (self.user_id, self.subscription_user_id) - unread_stories_key = 'UB:%s:%s' % (self.user_id, self.subscription_user_id) + + stories_key = "B:%s" % (self.subscription_user_id) + 
read_stories_key = "RS:%s" % (self.user_id) + read_social_stories_key = "RS:%s:B:%s" % (self.user_id, self.subscription_user_id) + unread_stories_key = "UB:%s:%s" % (self.user_id, self.subscription_user_id) if not r.exists(stories_key): return [] - elif read_filter != 'unread' or not r.exists(read_stories_key): + elif read_filter != "unread" or not r.exists(read_stories_key): ignore_user_stories = True unread_stories_key = stories_key else: r.sdiffstore(unread_stories_key, stories_key, read_stories_key) r.sdiffstore(unread_stories_key, unread_stories_key, read_social_stories_key) - sorted_stories_key = 'zB:%s' % (self.subscription_user_id) - unread_ranked_stories_key = 'z%sUB:%s:%s' % ('h' if hashes_only else '', - self.user_id, self.subscription_user_id) + sorted_stories_key = "zB:%s" % (self.subscription_user_id) + unread_ranked_stories_key = "z%sUB:%s:%s" % ( + "h" if hashes_only else "", + self.user_id, + self.subscription_user_id, + ) r.zinterstore(unread_ranked_stories_key, [sorted_stories_key, unread_stories_key]) - - now = datetime.datetime.now() - current_time = int(time.time() + 60*60*24) - mark_read_time = int(time.mktime(self.mark_read_date.timetuple())) + 1 + + now = datetime.datetime.now() + current_time = int(time.time() + 60 * 60 * 24) + mark_read_time = int(time.mktime(self.mark_read_date.timetuple())) + 1 if cutoff_date: - mark_read_time = int(time.mktime(cutoff_date.timetuple())) + 1 - - if order == 'oldest': + mark_read_time = int(time.mktime(cutoff_date.timetuple())) + 1 + + if order == "oldest": byscorefunc = r.zrangebyscore min_score = mark_read_time max_score = current_time - else: # newest + else: # newest byscorefunc = r.zrevrangebyscore min_score = current_time if mark_read_complement: @@ -1092,44 +1199,58 @@ def get_stories(self, offset=0, limit=6, order='newest', read_filter='all', unread_cutoff = cutoff_date if not unread_cutoff: unread_cutoff = now - datetime.timedelta(days=settings.DAYS_OF_UNREAD) - max_score = int(time.mktime(unread_cutoff.timetuple()))-1 + max_score = int(time.mktime(unread_cutoff.timetuple())) - 1 + + story_ids = byscorefunc( + unread_ranked_stories_key, min_score, max_score, start=offset, num=limit, withscores=withscores + ) - story_ids = byscorefunc(unread_ranked_stories_key, min_score, - max_score, start=offset, num=limit, - withscores=withscores) - if withscores: story_ids = [(s[0], int(s[1])) for s in story_ids] - - r.expire(unread_ranked_stories_key, 1*60*60) + + r.expire(unread_ranked_stories_key, 1 * 60 * 60) if not ignore_user_stories: r.delete(unread_stories_key) return story_ids - + @classmethod - def feed_stories(cls, user_id, social_user_ids, offset=0, limit=6, - order='newest', read_filter='all', relative_user_id=None, cache=True, - socialsubs=None, cutoff_date=None, dashboard_global=False): + def feed_stories( + cls, + user_id, + social_user_ids, + offset=0, + limit=6, + order="newest", + read_filter="all", + relative_user_id=None, + cache=True, + socialsubs=None, + cutoff_date=None, + dashboard_global=False, + ): rt = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_TEMP_POOL) - + if not relative_user_id: relative_user_id = user_id - - if order == 'oldest': + + if order == "oldest": range_func = rt.zrange else: range_func = rt.zrevrange - + if not isinstance(social_user_ids, list): social_user_ids = [social_user_ids] - ranked_stories_keys = 'zU:%s:social' % (user_id) - unread_ranked_stories_keys = 'zhU:%s:social' % (user_id) - if ((offset or dashboard_global) and cache and - rt.exists(ranked_stories_keys) and - 
rt.exists(unread_ranked_stories_keys)): + ranked_stories_keys = "zU:%s:social" % (user_id) + unread_ranked_stories_keys = "zhU:%s:social" % (user_id) + if ( + (offset or dashboard_global) + and cache + and rt.exists(ranked_stories_keys) + and rt.exists(unread_ranked_stories_keys) + ): story_hashes_and_dates = range_func(ranked_stories_keys, offset, limit, withscores=True) if not story_hashes_and_dates: return [], [], [] @@ -1137,22 +1258,26 @@ def feed_stories(cls, user_id, social_user_ids, offset=0, limit=6, if read_filter == "unread": unread_story_hashes = story_hashes else: - unread_story_hashes = range_func(unread_ranked_stories_keys, 0, offset+limit) + unread_story_hashes = range_func(unread_ranked_stories_keys, 0, offset + limit) return story_hashes, story_dates, unread_story_hashes else: rt.delete(ranked_stories_keys) rt.delete(unread_ranked_stories_keys) - - story_hashes = cls.story_hashes(user_id, relative_user_id, - subscription_user_ids=social_user_ids, - read_filter=read_filter, order=order, - include_timestamps=True, - group_by_user=False, - socialsubs=socialsubs, - cutoff_date=cutoff_date) + + story_hashes = cls.story_hashes( + user_id, + relative_user_id, + subscription_user_ids=social_user_ids, + read_filter=read_filter, + order=order, + include_timestamps=True, + group_by_user=False, + socialsubs=socialsubs, + cutoff_date=cutoff_date, + ) if not story_hashes: return [], [], [] - + pipeline = rt.pipeline() for story_hash_group in chunks(story_hashes, 100): pipeline.zadd(ranked_stories_keys, dict(story_hash_group)) @@ -1166,85 +1291,100 @@ def feed_stories(cls, user_id, social_user_ids, offset=0, limit=6, unread_feed_story_hashes = story_hashes rt.zunionstore(unread_ranked_stories_keys, [ranked_stories_keys]) else: - unread_story_hashes = cls.story_hashes(user_id, relative_user_id, - subscription_user_ids=social_user_ids, - read_filter="unread", order=order, - include_timestamps=True, - group_by_user=False, - socialsubs=socialsubs, - cutoff_date=cutoff_date) + unread_story_hashes = cls.story_hashes( + user_id, + relative_user_id, + subscription_user_ids=social_user_ids, + read_filter="unread", + order=order, + include_timestamps=True, + group_by_user=False, + socialsubs=socialsubs, + cutoff_date=cutoff_date, + ) if unread_story_hashes: pipeline = rt.pipeline() for unread_story_hash_group in chunks(unread_story_hashes, 100): pipeline.zadd(unread_ranked_stories_keys, dict(unread_story_hash_group)) pipeline.execute() unread_feed_story_hashes = range_func(unread_ranked_stories_keys, offset, limit) - - rt.expire(ranked_stories_keys, 60*60) - rt.expire(unread_ranked_stories_keys, 60*60) - + + rt.expire(ranked_stories_keys, 60 * 60) + rt.expire(unread_ranked_stories_keys, 60 * 60) + return story_hashes, story_dates, unread_feed_story_hashes def mark_newer_stories_read(self, cutoff_date): - if (self.unread_count_negative == 0 + if ( + self.unread_count_negative == 0 and self.unread_count_neutral == 0 and self.unread_count_positive == 0 - and not self.needs_unread_recalc): + and not self.needs_unread_recalc + ): return - + cutoff_date = cutoff_date - datetime.timedelta(seconds=1) - story_hashes = self.get_stories(limit=500, order="newest", cutoff_date=cutoff_date, - read_filter="unread", hashes_only=True) + story_hashes = self.get_stories( + limit=500, order="newest", cutoff_date=cutoff_date, read_filter="unread", hashes_only=True + ) data = self.mark_story_ids_as_read(story_hashes, aggregated=True) return data - - def mark_story_ids_as_read(self, story_hashes, feed_id=None, 
mark_all_read=False, request=None, aggregated=False): + + def mark_story_ids_as_read( + self, story_hashes, feed_id=None, mark_all_read=False, request=None, aggregated=False + ): data = dict(code=0, payload=story_hashes) r = redis.Redis(connection_pool=settings.REDIS_POOL) - + if not request: request = User.objects.get(pk=self.user_id) - + if not self.needs_unread_recalc and not mark_all_read: self.needs_unread_recalc = True self.save() - + sub_username = User.objects.get(pk=self.subscription_user_id).username - + if len(story_hashes) > 1: - logging.user(request, "~FYRead %s stories in social subscription: %s" % (len(story_hashes), sub_username)) + logging.user( + request, "~FYRead %s stories in social subscription: %s" % (len(story_hashes), sub_username) + ) else: logging.user(request, "~FYRead story in social subscription: %s" % (sub_username)) - - + for story_hash in set(story_hashes): if feed_id is not None: story_hash = MStory.ensure_story_hash(story_hash, story_feed_id=feed_id) if feed_id is None: feed_id, _ = MStory.split_story_hash(story_hash) - + if len(story_hashes) == 1: RUserStory.aggregate_mark_read(feed_id) - + # Find other social feeds with this story to update their counts friend_key = "F:%s:F" % (self.user_id) share_key = "S:%s" % (story_hash) friends_with_shares = [int(f) for f in r.sinter(share_key, friend_key)] - - RUserStory.mark_read(self.user_id, feed_id, story_hash, social_user_ids=friends_with_shares, - aggregated=(mark_all_read or aggregated)) - + + RUserStory.mark_read( + self.user_id, + feed_id, + story_hash, + social_user_ids=friends_with_shares, + aggregated=(mark_all_read or aggregated), + ) + if self.user_id in friends_with_shares: friends_with_shares.remove(self.user_id) if friends_with_shares: socialsubs = MSocialSubscription.objects.filter( - user_id=self.user_id, - subscription_user_id__in=friends_with_shares) + user_id=self.user_id, subscription_user_id__in=friends_with_shares + ) for socialsub in socialsubs: if not socialsub.needs_unread_recalc and not mark_all_read: socialsub.needs_unread_recalc = True socialsub.save() - + # Also count on original subscription usersubs = UserSubscription.objects.filter(user=self.user_id, feed=feed_id) if usersubs: @@ -1252,38 +1392,37 @@ def mark_story_ids_as_read(self, story_hashes, feed_id=None, mark_all_read=False if not usersub.needs_unread_recalc: usersub.needs_unread_recalc = True usersub.save() - + return data - + @classmethod - def mark_unsub_story_ids_as_read(cls, user_id, social_user_id, story_ids, feed_id=None, - request=None): + def mark_unsub_story_ids_as_read(cls, user_id, social_user_id, story_ids, feed_id=None, request=None): data = dict(code=0, payload=story_ids) r = redis.Redis(connection_pool=settings.REDIS_POOL) if not request: request = User.objects.get(pk=user_id) - + if len(story_ids) > 1: logging.user(request, "~FYRead %s social stories from global" % (len(story_ids))) else: logging.user(request, "~FYRead social story from global") - + for story_id in set(story_ids): try: - story = MSharedStory.objects.get(user_id=social_user_id, - story_guid=story_id) + story = MSharedStory.objects.get(user_id=social_user_id, story_guid=story_id) except MSharedStory.DoesNotExist: continue - + # Find other social feeds with this story to update their counts friend_key = "F:%s:F" % (user_id) share_key = "S:%s" % (story.story_hash) friends_with_shares = [int(f) for f in r.sinter(share_key, friend_key)] - - RUserStory.mark_read(user_id, story.story_feed_id, story.story_hash, - social_user_ids=friends_with_shares) 
- + + RUserStory.mark_read( + user_id, story.story_feed_id, story.story_hash, social_user_ids=friends_with_shares + ) + # Also count on original subscription usersubs = UserSubscription.objects.filter(user=user_id, feed=story.story_feed_id) if usersubs: @@ -1293,26 +1432,32 @@ def mark_unsub_story_ids_as_read(cls, user_id, social_user_id, story_ids, feed_i usersub.save() # XXX TODO: Real-time notification, just for this user return data - + def mark_feed_read(self, cutoff_date=None): user_profile = Profile.objects.get(user_id=self.user_id) recount = True - + if cutoff_date: cutoff_date = cutoff_date + datetime.timedelta(seconds=1) else: # Use the latest story to get last read time. now = datetime.datetime.now() - latest_shared_story = MSharedStory.objects(user_id=self.subscription_user_id, - shared_date__gte=user_profile.unread_cutoff, - story_date__lte=now - ).order_by('-shared_date').only('shared_date').first() + latest_shared_story = ( + MSharedStory.objects( + user_id=self.subscription_user_id, + shared_date__gte=user_profile.unread_cutoff, + story_date__lte=now, + ) + .order_by("-shared_date") + .only("shared_date") + .first() + ) if latest_shared_story: - cutoff_date = latest_shared_story['shared_date'] + datetime.timedelta(seconds=1) + cutoff_date = latest_shared_story["shared_date"] + datetime.timedelta(seconds=1) else: cutoff_date = datetime.datetime.utcnow() recount = False - + self.last_read_date = cutoff_date self.mark_read_date = cutoff_date self.oldest_unread_story_date = cutoff_date @@ -1324,18 +1469,19 @@ def mark_feed_read(self, cutoff_date=None): self.needs_unread_recalc = False else: self.needs_unread_recalc = True - + # Manually mark all shared stories as read. - unread_story_hashes = self.get_stories(read_filter='unread', limit=500, hashes_only=True, - mark_read_complement=True) + unread_story_hashes = self.get_stories( + read_filter="unread", limit=500, hashes_only=True, mark_read_complement=True + ) self.mark_story_ids_as_read(unread_story_hashes, mark_all_read=True) - + self.save() - + def calculate_feed_scores(self, force=False, silent=False): if not self.needs_unread_recalc and not force: return self - + now = datetime.datetime.now() user_profile = Profile.objects.get(user_id=self.user_id) @@ -1343,9 +1489,9 @@ def calculate_feed_scores(self, force=False, silent=False): # if not silent: # logging.info(' ---> [%s] SKIPPING Computing scores: %s (1 week+)' % (self.user, self.feed)) return self - + feed_scores = dict(negative=0, neutral=0, positive=0) - + # Two weeks in age. If mark_read_date is older, mark old stories as read. 
date_delta = user_profile.unread_cutoff if date_delta < self.mark_read_date: @@ -1353,95 +1499,117 @@ def calculate_feed_scores(self, force=False, silent=False): else: self.mark_read_date = date_delta - unread_story_hashes = self.get_stories(read_filter='unread', limit=500, hashes_only=True, - cutoff_date=date_delta) - stories_db = MSharedStory.objects(user_id=self.subscription_user_id, - story_hash__in=unread_story_hashes) + unread_story_hashes = self.get_stories( + read_filter="unread", limit=500, hashes_only=True, cutoff_date=date_delta + ) + stories_db = MSharedStory.objects( + user_id=self.subscription_user_id, story_hash__in=unread_story_hashes + ) story_feed_ids = set() for s in stories_db: - story_feed_ids.add(s['story_feed_id']) + story_feed_ids.add(s["story_feed_id"]) story_feed_ids = list(story_feed_ids) usersubs = UserSubscription.objects.filter(user__pk=self.user_id, feed__pk__in=story_feed_ids) usersubs_map = dict((sub.feed_id, sub) for sub in usersubs) - + oldest_unread_story_date = now unread_stories_db = [] for story in stories_db: - if story['story_hash'] not in unread_story_hashes: + if story["story_hash"] not in unread_story_hashes: continue feed_id = story.story_feed_id if usersubs_map.get(feed_id) and story.shared_date < usersubs_map[feed_id].mark_read_date: continue - + unread_stories_db.append(story) if story.shared_date < oldest_unread_story_date: oldest_unread_story_date = story.shared_date stories = Feed.format_stories(unread_stories_db) - classifier_feeds = list(MClassifierFeed.objects(user_id=self.user_id, social_user_id=self.subscription_user_id)) - classifier_authors = list(MClassifierAuthor.objects(user_id=self.user_id, social_user_id=self.subscription_user_id)) - classifier_titles = list(MClassifierTitle.objects(user_id=self.user_id, social_user_id=self.subscription_user_id)) - classifier_tags = list(MClassifierTag.objects(user_id=self.user_id, social_user_id=self.subscription_user_id)) + classifier_feeds = list( + MClassifierFeed.objects(user_id=self.user_id, social_user_id=self.subscription_user_id) + ) + classifier_authors = list( + MClassifierAuthor.objects(user_id=self.user_id, social_user_id=self.subscription_user_id) + ) + classifier_titles = list( + MClassifierTitle.objects(user_id=self.user_id, social_user_id=self.subscription_user_id) + ) + classifier_tags = list( + MClassifierTag.objects(user_id=self.user_id, social_user_id=self.subscription_user_id) + ) # Merge with feed specific classifiers if story_feed_ids: - classifier_feeds = classifier_feeds + list(MClassifierFeed.objects(user_id=self.user_id, - feed_id__in=story_feed_ids)) - classifier_authors = classifier_authors + list(MClassifierAuthor.objects(user_id=self.user_id, - feed_id__in=story_feed_ids)) - classifier_titles = classifier_titles + list(MClassifierTitle.objects(user_id=self.user_id, - feed_id__in=story_feed_ids)) - classifier_tags = classifier_tags + list(MClassifierTag.objects(user_id=self.user_id, - feed_id__in=story_feed_ids)) + classifier_feeds = classifier_feeds + list( + MClassifierFeed.objects(user_id=self.user_id, feed_id__in=story_feed_ids) + ) + classifier_authors = classifier_authors + list( + MClassifierAuthor.objects(user_id=self.user_id, feed_id__in=story_feed_ids) + ) + classifier_titles = classifier_titles + list( + MClassifierTitle.objects(user_id=self.user_id, feed_id__in=story_feed_ids) + ) + classifier_tags = classifier_tags + list( + MClassifierTag.objects(user_id=self.user_id, feed_id__in=story_feed_ids) + ) for story in stories: scores = { - 'feed' : 
apply_classifier_feeds(classifier_feeds, story['story_feed_id'], - social_user_ids=self.subscription_user_id), - 'author' : apply_classifier_authors(classifier_authors, story), - 'tags' : apply_classifier_tags(classifier_tags, story), - 'title' : apply_classifier_titles(classifier_titles, story), + "feed": apply_classifier_feeds( + classifier_feeds, story["story_feed_id"], social_user_ids=self.subscription_user_id + ), + "author": apply_classifier_authors(classifier_authors, story), + "tags": apply_classifier_tags(classifier_tags, story), + "title": apply_classifier_titles(classifier_titles, story), } - - max_score = max(scores['author'], scores['tags'], scores['title']) - min_score = min(scores['author'], scores['tags'], scores['title']) - + + max_score = max(scores["author"], scores["tags"], scores["title"]) + min_score = min(scores["author"], scores["tags"], scores["title"]) + if max_score > 0: - feed_scores['positive'] += 1 + feed_scores["positive"] += 1 elif min_score < 0: - feed_scores['negative'] += 1 + feed_scores["negative"] += 1 else: - if scores['feed'] > 0: - feed_scores['positive'] += 1 - elif scores['feed'] < 0: - feed_scores['negative'] += 1 + if scores["feed"] > 0: + feed_scores["positive"] += 1 + elif scores["feed"] < 0: + feed_scores["negative"] += 1 else: - feed_scores['neutral'] += 1 - - - self.unread_count_positive = feed_scores['positive'] - self.unread_count_neutral = feed_scores['neutral'] - self.unread_count_negative = feed_scores['negative'] + feed_scores["neutral"] += 1 + + self.unread_count_positive = feed_scores["positive"] + self.unread_count_neutral = feed_scores["neutral"] + self.unread_count_negative = feed_scores["negative"] self.unread_count_updated = datetime.datetime.now() self.oldest_unread_story_date = oldest_unread_story_date self.needs_unread_recalc = False - + self.save() - if (self.unread_count_positive == 0 and - self.unread_count_neutral == 0): + if self.unread_count_positive == 0 and self.unread_count_neutral == 0: self.mark_feed_read() - + if not silent: - logging.info(' ---> [%s] Computing social scores: %s (%s/%s/%s)' % (user_profile, self.subscription_user_id, feed_scores['negative'], feed_scores['neutral'], feed_scores['positive'])) - + logging.info( + " ---> [%s] Computing social scores: %s (%s/%s/%s)" + % ( + user_profile, + self.subscription_user_id, + feed_scores["negative"], + feed_scores["neutral"], + feed_scores["positive"], + ) + ) + return self - + @classmethod def mark_dirty_sharing_story(cls, user_id, story_feed_id, story_guid_hash): r = redis.Redis(connection_pool=settings.REDIS_POOL) - + friends_key = "F:%s:F" % (user_id) share_key = "S:%s:%s" % (story_feed_id, story_guid_hash) following_user_ids = r.sinter(friends_key, share_key) @@ -1455,90 +1623,99 @@ def mark_dirty_sharing_story(cls, user_id, story_feed_id, story_guid_hash): social_sub.save() return social_subs + class MCommentReply(mongo.EmbeddedDocument): - reply_id = mongo.ObjectIdField() - user_id = mongo.IntField() - publish_date = mongo.DateTimeField() - comments = mongo.StringField() - email_sent = mongo.BooleanField(default=False) - liking_users = mongo.ListField(mongo.IntField()) - + reply_id = mongo.ObjectIdField() + user_id = mongo.IntField() + publish_date = mongo.DateTimeField() + comments = mongo.StringField() + email_sent = mongo.BooleanField(default=False) + liking_users = mongo.ListField(mongo.IntField()) + def canonical(self): reply = { - 'reply_id': self.reply_id, - 'user_id': self.user_id, - 'publish_date': relative_timesince(self.publish_date), - 
'date': self.publish_date, - 'comments': self.comments, + "reply_id": self.reply_id, + "user_id": self.user_id, + "publish_date": relative_timesince(self.publish_date), + "date": self.publish_date, + "comments": self.comments, } return reply - + meta = { - 'ordering': ['publish_date'], - 'id_field': 'reply_id', - 'allow_inheritance': False, - 'strict': False, + "ordering": ["publish_date"], + "id_field": "reply_id", + "allow_inheritance": False, + "strict": False, } class MSharedStory(mongo.DynamicDocument): - user_id = mongo.IntField() - shared_date = mongo.DateTimeField() - comments = mongo.StringField() - has_comments = mongo.BooleanField(default=False) - has_replies = mongo.BooleanField(default=False) - replies = mongo.ListField(mongo.EmbeddedDocumentField(MCommentReply)) - source_user_id = mongo.IntField() - story_hash = mongo.StringField() - story_feed_id = mongo.IntField() - story_date = mongo.DateTimeField() - story_title = mongo.StringField(max_length=1024) - story_content = mongo.StringField() - story_content_z = mongo.BinaryField() - story_original_content = mongo.StringField() + user_id = mongo.IntField() + shared_date = mongo.DateTimeField() + comments = mongo.StringField() + has_comments = mongo.BooleanField(default=False) + has_replies = mongo.BooleanField(default=False) + replies = mongo.ListField(mongo.EmbeddedDocumentField(MCommentReply)) + source_user_id = mongo.IntField() + story_hash = mongo.StringField() + story_feed_id = mongo.IntField() + story_date = mongo.DateTimeField() + story_title = mongo.StringField(max_length=1024) + story_content = mongo.StringField() + story_content_z = mongo.BinaryField() + story_original_content = mongo.StringField() story_original_content_z = mongo.BinaryField() - original_text_z = mongo.BinaryField() - original_page_z = mongo.BinaryField() - story_content_type = mongo.StringField(max_length=255) - story_author_name = mongo.StringField() - story_permalink = mongo.StringField() - story_guid = mongo.StringField(unique_with=('user_id',)) - story_guid_hash = mongo.StringField(max_length=6) - image_urls = mongo.ListField(mongo.StringField(max_length=1024)) - story_tags = mongo.ListField(mongo.StringField(max_length=250)) - posted_to_services = mongo.ListField(mongo.StringField(max_length=20)) - mute_email_users = mongo.ListField(mongo.IntField()) - liking_users = mongo.ListField(mongo.IntField()) - emailed_reshare = mongo.BooleanField(default=False) - emailed_replies = mongo.ListField(mongo.ObjectIdField()) - image_count = mongo.IntField() - image_sizes = mongo.ListField(mongo.DictField()) - + original_text_z = mongo.BinaryField() + original_page_z = mongo.BinaryField() + story_content_type = mongo.StringField(max_length=255) + story_author_name = mongo.StringField() + story_permalink = mongo.StringField() + story_guid = mongo.StringField(unique_with=("user_id",)) + story_guid_hash = mongo.StringField(max_length=6) + image_urls = mongo.ListField(mongo.StringField(max_length=1024)) + story_tags = mongo.ListField(mongo.StringField(max_length=250)) + posted_to_services = mongo.ListField(mongo.StringField(max_length=20)) + mute_email_users = mongo.ListField(mongo.IntField()) + liking_users = mongo.ListField(mongo.IntField()) + emailed_reshare = mongo.BooleanField(default=False) + emailed_replies = mongo.ListField(mongo.ObjectIdField()) + image_count = mongo.IntField() + image_sizes = mongo.ListField(mongo.DictField()) + meta = { - 'collection': 'shared_stories', - 'indexes': [('user_id', '-shared_date'), ('user_id', 'story_feed_id'), - 
'shared_date', 'story_guid', 'story_feed_id', 'story_hash'], - 'ordering': ['-shared_date'], - 'allow_inheritance': False, - 'strict': False, + "collection": "shared_stories", + "indexes": [ + ("user_id", "-shared_date"), + ("user_id", "story_feed_id"), + "shared_date", + "story_guid", + "story_feed_id", + "story_hash", + ], + "ordering": ["-shared_date"], + "allow_inheritance": False, + "strict": False, } def __str__(self): user = User.objects.get(pk=self.user_id) - return "%s: %s (%s)%s%s" % (user.username, - self.decoded_story_title[:20], - self.story_feed_id, - ': ' if self.has_comments else '', - self.comments[:20]) + return "%s: %s (%s)%s%s" % ( + user.username, + self.decoded_story_title[:20], + self.story_feed_id, + ": " if self.has_comments else "", + self.comments[:20], + ) @property def guid_hash(self): - return hashlib.sha1(self.story_guid.encode('utf-8')).hexdigest()[:6] - + return hashlib.sha1(self.story_guid.encode("utf-8")).hexdigest()[:6] + @property def feed_guid_hash(self): return "%s:%s" % (self.story_feed_id or "0", self.guid_hash) - + @property def decoded_story_title(self): return pyhtml.unescape(self.story_title) @@ -1550,7 +1727,7 @@ def story_content_str(self): story_content = smart_str(zlib.decompress(self.story_content_z)) else: story_content = smart_str(story_content) - + return story_content def canonical(self): @@ -1561,7 +1738,7 @@ def canonical(self): "story_content": self.story_content_z and zlib.decompress(self.story_content_z), "comments": self.comments, } - + def save(self, *args, **kwargs): scrubber = SelectiveScriptScrubber() @@ -1583,23 +1760,29 @@ def save(self, *args, **kwargs): self.shared_date = self.shared_date or datetime.datetime.utcnow() self.has_replies = bool(len(self.replies)) - + super(MSharedStory, self).save(*args, **kwargs) - + author = MSocialProfile.get_user(self.user_id) author.count_follows() - + self.sync_redis() - - MActivity.new_shared_story(user_id=self.user_id, source_user_id=self.source_user_id, - story_title=self.story_title, - comments=self.comments, story_feed_id=self.story_feed_id, - story_id=self.story_guid, share_date=self.shared_date) + + MActivity.new_shared_story( + user_id=self.user_id, + source_user_id=self.source_user_id, + story_title=self.story_title, + comments=self.comments, + story_feed_id=self.story_feed_id, + story_id=self.story_guid, + share_date=self.shared_date, + ) return self - + def delete(self, *args, **kwargs): - MActivity.remove_shared_story(user_id=self.user_id, story_feed_id=self.story_feed_id, - story_id=self.story_guid) + MActivity.remove_shared_story( + user_id=self.user_id, story_feed_id=self.story_feed_id, story_id=self.story_guid + ) self.remove_from_redis() @@ -1608,48 +1791,52 @@ def delete(self, *args, **kwargs): @classmethod def trim_old_stories(cls, stories=10, days=90, dryrun=False): print(" ---> Fetching shared story counts...") - stats = settings.MONGODB.newsblur.shared_stories.aggregate([{ - "$group": { - "_id": "$user_id", - "stories": {"$sum": 1}, - }, - }, { - "$match": { - "stories": {"$gte": stories} - }, - }]) + stats = settings.MONGODB.newsblur.shared_stories.aggregate( + [ + { + "$group": { + "_id": "$user_id", + "stories": {"$sum": 1}, + }, + }, + { + "$match": {"stories": {"$gte": stories}}, + }, + ] + ) month_ago = datetime.datetime.now() - datetime.timedelta(days=days) user_ids = list(stats) - user_ids = sorted(user_ids, key=lambda x:x['stories'], reverse=True) + user_ids = sorted(user_ids, key=lambda x: x["stories"], reverse=True) print(" ---> Found %s users with more 
than %s starred stories" % (len(user_ids), stories)) total = 0 for stat in user_ids: try: - user = User.objects.select_related('profile').get(pk=stat['_id']) + user = User.objects.select_related("profile").get(pk=stat["_id"]) except User.DoesNotExist: user = None - + if user and (user.profile.is_premium or user.profile.last_seen_on > month_ago): continue - - total += stat['stories'] - username = "%s (%s)" % (user and user.username or " - ", stat['_id']) - print(" ---> %19.19s: %-20.20s %s stories" % (user and user.profile.last_seen_on or "Deleted", - username, - stat['stories'])) - if not dryrun and stat['_id']: - cls.objects.filter(user_id=stat['_id']).delete() - elif not dryrun and stat['_id'] == 0: + + total += stat["stories"] + username = "%s (%s)" % (user and user.username or " - ", stat["_id"]) + print( + " ---> %19.19s: %-20.20s %s stories" + % (user and user.profile.last_seen_on or "Deleted", username, stat["stories"]) + ) + if not dryrun and stat["_id"]: + cls.objects.filter(user_id=stat["_id"]).delete() + elif not dryrun and stat["_id"] == 0: print(" ---> Deleting unshared stories (user_id = 0)") - cls.objects.filter(user_id=stat['_id']).delete() - - + cls.objects.filter(user_id=stat["_id"]).delete() + print(" ---> Deleted %s stories in total." % total) - + def unshare_story(self): - socialsubs = MSocialSubscription.objects.filter(subscription_user_id=self.user_id, - needs_unread_recalc=False) + socialsubs = MSocialSubscription.objects.filter( + subscription_user_id=self.user_id, needs_unread_recalc=False + ) for socialsub in socialsubs: socialsub.needs_unread_recalc = True socialsub.save() @@ -1660,23 +1847,30 @@ def publish_to_subscribers(self): feed = Feed.get_by_id(self.story_feed_id) try: r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - r.publish("social:%s:story" % (self.user_id), '%s,%s' % (self.story_hash, self.shared_date.strftime('%s'))) - logging.debug(" ***> [%-30s] ~BMPublishing to Redis for real-time." % (feed.title[:30] if feed else "NO FEED")) + r.publish( + "social:%s:story" % (self.user_id), + "%s,%s" % (self.story_hash, self.shared_date.strftime("%s")), + ) + logging.debug( + " ***> [%-30s] ~BMPublishing to Redis for real-time." + % (feed.title[:30] if feed else "NO FEED") + ) except redis.ConnectionError: - logging.debug(" ***> [%-30s] ~BMRedis is unavailable for real-time." % (feed.title[:30] if feed else "NO FEED")) - + logging.debug( + " ***> [%-30s] ~BMRedis is unavailable for real-time." 
+ % (feed.title[:30] if feed else "NO FEED") + ) + @classmethod def feed_quota(cls, user_id, story_hash, feed_id=None, days=1, quota=1): - day_ago = datetime.datetime.now()-datetime.timedelta(days=days) - params = dict(user_id=user_id, - shared_date__gte=day_ago, - story_hash__nin=[story_hash]) + day_ago = datetime.datetime.now() - datetime.timedelta(days=days) + params = dict(user_id=user_id, shared_date__gte=day_ago, story_hash__nin=[story_hash]) if feed_id: - params['story_feed_id'] = feed_id + params["story_feed_id"] = feed_id shared_count = cls.objects.filter(**params).count() return shared_count >= quota - + @classmethod def count_potential_spammers(cls, days=1, destroy=False): try: @@ -1684,50 +1878,59 @@ def count_potential_spammers(cls, days=1, destroy=False): except NameError: logging.debug(" ---> ~FR~SNMissing ~SBspam.py~SN") guaranteed_spammers = [] - + return guaranteed_spammers - + @classmethod def get_shared_stories_from_site(cls, feed_id, user_id, story_url, limit=3): - your_story = cls.objects.filter(story_feed_id=feed_id, - story_permalink=story_url, - user_id=user_id).limit(1).first() - same_stories = cls.objects.filter(story_feed_id=feed_id, - story_permalink=story_url, - user_id__ne=user_id - ).order_by('-shared_date') - - same_stories = [{ - "user_id": story.user_id, - "comments": story.comments, - "relative_date": relative_timesince(story.shared_date), - "blurblog_permalink": story.blurblog_permalink(), - } for story in same_stories] - - other_stories = [] - if feed_id: - other_stories = cls.objects.filter(story_feed_id=feed_id, - story_permalink__ne=story_url - ).order_by('-shared_date').limit(limit) - other_stories = [{ + your_story = ( + cls.objects.filter(story_feed_id=feed_id, story_permalink=story_url, user_id=user_id) + .limit(1) + .first() + ) + same_stories = cls.objects.filter( + story_feed_id=feed_id, story_permalink=story_url, user_id__ne=user_id + ).order_by("-shared_date") + + same_stories = [ + { "user_id": story.user_id, - "story_title": story.story_title, - "story_permalink": story.story_permalink, "comments": story.comments, "relative_date": relative_timesince(story.shared_date), "blurblog_permalink": story.blurblog_permalink(), - } for story in other_stories] - + } + for story in same_stories + ] + + other_stories = [] + if feed_id: + other_stories = ( + cls.objects.filter(story_feed_id=feed_id, story_permalink__ne=story_url) + .order_by("-shared_date") + .limit(limit) + ) + other_stories = [ + { + "user_id": story.user_id, + "story_title": story.story_title, + "story_permalink": story.story_permalink, + "comments": story.comments, + "relative_date": relative_timesince(story.shared_date), + "blurblog_permalink": story.blurblog_permalink(), + } + for story in other_stories + ] + return your_story, same_stories, other_stories - + def set_source_user_id(self, source_user_id): if source_user_id == self.user_id: return - + def find_source(source_user_id, seen_user_ids): - parent_shared_story = MSharedStory.objects.filter(user_id=source_user_id, - story_guid=self.story_guid, - story_feed_id=self.story_feed_id).limit(1) + parent_shared_story = MSharedStory.objects.filter( + user_id=source_user_id, story_guid=self.story_guid, story_feed_id=self.story_feed_id + ).limit(1) if parent_shared_story and parent_shared_story[0].source_user_id: user_id = parent_shared_story[0].source_user_id if user_id in seen_user_ids: @@ -1737,7 +1940,7 @@ def find_source(source_user_id, seen_user_ids): return find_source(user_id, seen_user_ids) else: return source_user_id - + if 
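feed_quota() above is a sliding-window check: count the user's shares over the last `days`, excluding the story currently being shared, and refuse once the count reaches `quota`. The same shape in plain Python, with a list of tuples standing in for the Mongo query:

    # Plain-Python sketch of the window check in feed_quota(); shares is a
    # list of (user_id, story_hash, shared_at) tuples standing in for
    # MSharedStory documents.
    import datetime

    def over_share_quota(shares, user_id, story_hash, days=1, quota=1):
        cutoff = datetime.datetime.now() - datetime.timedelta(days=days)
        recent = [
            h for u, h, at in shares
            if u == user_id and h != story_hash and at >= cutoff
        ]
        return len(recent) >= quota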
source_user_id: source_user_id = find_source(source_user_id, []) if source_user_id == self.user_id: @@ -1746,19 +1949,21 @@ def find_source(source_user_id, seen_user_ids): self.source_user_id = source_user_id logging.debug(" ---> Re-share from %s." % source_user_id) self.save() - - MInteraction.new_reshared_story(user_id=self.source_user_id, - reshare_user_id=self.user_id, - comments=self.comments, - story_title=self.story_title, - story_feed_id=self.story_feed_id, - story_id=self.story_guid) - + + MInteraction.new_reshared_story( + user_id=self.source_user_id, + reshare_user_id=self.user_id, + comments=self.comments, + story_title=self.story_title, + story_feed_id=self.story_feed_id, + story_id=self.story_guid, + ) + def mute_for_user(self, user_id): if user_id not in self.mute_email_users: self.mute_email_users.append(user_id) self.save() - + @classmethod def switch_feed(cls, original_feed_id, duplicate_feed_id): shared_stories = cls.objects.filter(story_feed_id=duplicate_feed_id) @@ -1766,7 +1971,7 @@ def switch_feed(cls, original_feed_id, duplicate_feed_id): for story in shared_stories: story.story_feed_id = original_feed_id story.save() - + @classmethod def collect_popular_stories(cls, cutoff=None, days=None, shared_feed_ids=None): if not days: @@ -1778,7 +1983,7 @@ def collect_popular_stories(cls, cutoff=None, days=None, shared_feed_ids=None): # shared_stories_count = sum(json.decode(MStatistics.get('stories_shared'))) # cutoff = cutoff or max(math.floor(.025 * shared_stories_count), 3) today = datetime.datetime.now() - datetime.timedelta(days=days) - + map_f = """ function() { emit(this.story_hash, { @@ -1809,74 +2014,82 @@ def collect_popular_stories(cls, cutoff=None, days=None, shared_feed_ids=None): return value; } } - """ % {'cutoff': cutoff, 'shared_feed_ids': ', '.join(shared_feed_ids)} - res = cls.objects(shared_date__gte=today).map_reduce(map_f, reduce_f, - finalize_f=finalize_f, - output='inline') + """ % { + "cutoff": cutoff, + "shared_feed_ids": ", ".join(shared_feed_ids), + } + res = cls.objects(shared_date__gte=today).map_reduce( + map_f, reduce_f, finalize_f=finalize_f, output="inline" + ) stories = dict([(r.key, r.value) for r in res if r.value]) return stories, cutoff - + @classmethod def share_popular_stories(cls, cutoff=None, days=None, interactive=True): publish_new_stories = False - popular_profile = MSocialProfile.objects.get(user_id=User.objects.get(username='popular').pk) + popular_profile = MSocialProfile.objects.get(user_id=User.objects.get(username="popular").pk) popular_user = User.objects.get(pk=popular_profile.user_id) week_ago = datetime.datetime.now() - datetime.timedelta(days=7) - shared_feed_ids = [str(s.story_feed_id) - for s in MSharedStory.objects(user_id=popular_profile.user_id, - shared_date__gte=week_ago).only('story_feed_id')] - shared_stories_today, cutoff = cls.collect_popular_stories(cutoff=cutoff, days=days, - shared_feed_ids=shared_feed_ids) + shared_feed_ids = [ + str(s.story_feed_id) + for s in MSharedStory.objects(user_id=popular_profile.user_id, shared_date__gte=week_ago).only( + "story_feed_id" + ) + ] + shared_stories_today, cutoff = cls.collect_popular_stories( + cutoff=cutoff, days=days, shared_feed_ids=shared_feed_ids + ) shared = 0 - + for story_hash, story_info in list(shared_stories_today.items()): - story, _ = MStory.find_story(story_info['feed_id'], story_info['story_hash']) + story, _ = MStory.find_story(story_info["feed_id"], story_info["story_hash"]) if not story: logging.user(popular_user, "~FRPopular stories, story not 
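find_source() above walks the re-share chain back to its origin, threading seen_user_ids through the recursion so a circular chain terminates instead of looping forever. The same traversal, iteratively, over a plain dict:

    # Sketch of the cycle-safe origin walk in find_source(); source_map maps
    # user_id -> the user they reshared from (standing in for the parent
    # MSharedStory lookups).
    def find_origin(source_map, start_user_id):
        seen = set()
        user_id = start_user_id
        while source_map.get(user_id) is not None:
            if user_id in seen:
                return user_id  # circular re-share chain; stop here
            seen.add(user_id)
            user_id = source_map[user_id]
        return user_id

    # 3 reshared from 2, who reshared from 1, the original sharer:
    assert find_origin({3: 2, 2: 1}, 3) == 1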
found: %s" % story_info) continue if story.story_feed_id in shared_feed_ids: logging.user(popular_user, "~FRPopular stories, story feed just shared: %s" % story_info) continue - + if interactive: feed = Feed.get_by_id(story.story_feed_id) accept_story = eval(input("%s / %s [Y/n]: " % (story.decoded_story_title, feed.title))) - if accept_story in ['n', 'N']: continue - - story_db = dict([(k, v) for k, v in list(story._data.items()) - if k is not None and v is not None]) - story_db.pop('user_id', None) - story_db.pop('id', None) - story_db.pop('comments', None) - story_db.pop('replies', None) - story_db['has_comments'] = False - story_db['has_replies'] = False - story_db['shared_date'] = datetime.datetime.now() + if accept_story in ["n", "N"]: + continue + + story_db = dict([(k, v) for k, v in list(story._data.items()) if k is not None and v is not None]) + story_db.pop("user_id", None) + story_db.pop("id", None) + story_db.pop("comments", None) + story_db.pop("replies", None) + story_db["has_comments"] = False + story_db["has_replies"] = False + story_db["shared_date"] = datetime.datetime.now() story_values = { - 'user_id': popular_profile.user_id, - 'story_guid': story_db['story_guid'], + "user_id": popular_profile.user_id, + "story_guid": story_db["story_guid"], } try: shared_story = MSharedStory.objects.get(**story_values) except MSharedStory.DoesNotExist: story_values.update(story_db) shared_story = MSharedStory.objects.create(**story_values) - shared_story.post_to_service('twitter') + shared_story.post_to_service("twitter") shared += 1 shared_feed_ids.append(story.story_feed_id) publish_new_stories = True - logging.user(popular_user, "~FCSharing: ~SB~FM%s (%s shares, %s min)" % ( - story.decoded_story_title[:50], - story_info['count'], - cutoff)) - + logging.user( + popular_user, + "~FCSharing: ~SB~FM%s (%s shares, %s min)" + % (story.decoded_story_title[:50], story_info["count"], cutoff), + ) + if publish_new_stories: socialsubs = MSocialSubscription.objects.filter(subscription_user_id=popular_user.pk) for socialsub in socialsubs: socialsub.needs_unread_recalc = True socialsub.save() shared_story.publish_update_to_subscribers() - + return shared @staticmethod @@ -1884,13 +2097,13 @@ def check_shared_story_hashes(user_id, story_hashes, r=None): if not r: r = redis.Redis(connection_pool=settings.REDIS_POOL) pipeline = r.pipeline() - + for story_hash in story_hashes: feed_id, guid_hash = MStory.split_story_hash(story_hash) share_key = "S:%s:%s" % (feed_id, guid_hash) pipeline.sismember(share_key, user_id) shared_hashes = pipeline.execute() - + return [story_hash for s, story_hash in enumerate(story_hashes) if shared_hashes[s]] @classmethod @@ -1907,7 +2120,7 @@ def sync_all_redis(cls, drop=False): for story in cls.objects.all(): story.sync_redis_shares(r=r) story.sync_redis_story(r=h) - + def sync_redis(self): self.sync_redis_shares() self.sync_redis_story() @@ -1915,8 +2128,8 @@ def sync_redis(self): def sync_redis_shares(self, r=None): if not r: r = redis.Redis(connection_pool=settings.REDIS_POOL) - - share_key = "S:%s:%s" % (self.story_feed_id, self.guid_hash) + + share_key = "S:%s:%s" % (self.story_feed_id, self.guid_hash) comment_key = "C:%s:%s" % (self.story_feed_id, self.guid_hash) r.sadd(share_key, self.user_id) if self.has_comments: @@ -1929,20 +2142,18 @@ def sync_redis_story(self, r=None): r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) # if not r2: # r2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2) - - r.sadd('B:%s' % self.user_id, 
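check_shared_story_hashes() above batches one SISMEMBER per story hash into a single pipeline, so the whole membership test costs one round trip. The pattern in isolation; the "S:<feed_id>:<guid_hash>" key layout comes from the code above, the host is assumed:

    # Pipelined membership test, as in check_shared_story_hashes().
    import redis

    r = redis.Redis(host="localhost", port=6379)  # assumed host

    def shared_by_user(user_id, story_hashes):
        pipe = r.pipeline()
        for story_hash in story_hashes:
            feed_id, guid_hash = story_hash.split(":")
            pipe.sismember("S:%s:%s" % (feed_id, guid_hash), user_id)
        flags = pipe.execute()  # one round trip, one bool per hash
        return [h for h, flag in zip(story_hashes, flags) if flag]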
self.feed_guid_hash) + + r.sadd("B:%s" % self.user_id, self.feed_guid_hash) # r2.sadd('B:%s' % self.user_id, self.feed_guid_hash) - redis_data = { - self.feed_guid_hash : time.mktime(self.shared_date.timetuple()) - } - r.zadd('zB:%s' % self.user_id, redis_data) + redis_data = {self.feed_guid_hash: time.mktime(self.shared_date.timetuple())} + r.zadd("zB:%s" % self.user_id, redis_data) # r2.zadd('zB:%s' % self.user_id, {self.feed_guid_hash: # time.mktime(self.shared_date.timetuple())}) - r.expire('B:%s' % self.user_id, settings.DAYS_OF_STORY_HASHES*24*60*60) + r.expire("B:%s" % self.user_id, settings.DAYS_OF_STORY_HASHES * 24 * 60 * 60) # r2.expire('B:%s' % self.user_id, settings.DAYS_OF_STORY_HASHES*24*60*60) - r.expire('zB:%s' % self.user_id, settings.DAYS_OF_STORY_HASHES*24*60*60) + r.expire("zB:%s" % self.user_id, settings.DAYS_OF_STORY_HASHES * 24 * 60 * 60) # r2.expire('zB:%s' % self.user_id, settings.DAYS_OF_STORY_HASHES*24*60*60) - + def remove_from_redis(self): r = redis.Redis(connection_pool=settings.REDIS_POOL) share_key = "S:%s:%s" % (self.story_feed_id, self.guid_hash) @@ -1953,16 +2164,16 @@ def remove_from_redis(self): h = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) # h2 = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL2) - h.srem('B:%s' % self.user_id, self.feed_guid_hash) + h.srem("B:%s" % self.user_id, self.feed_guid_hash) # h2.srem('B:%s' % self.user_id, self.feed_guid_hash) - h.zrem('zB:%s' % self.user_id, self.feed_guid_hash) + h.zrem("zB:%s" % self.user_id, self.feed_guid_hash) # h2.zrem('zB:%s' % self.user_id, self.feed_guid_hash) def publish_update_to_subscribers(self): try: r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) feed_id = "social:%s" % self.user_id - listeners_count = r.publish("%s:story" % feed_id, 'story:new:%s' % self.story_hash) + listeners_count = r.publish("%s:story" % feed_id, "story:new:%s" % self.story_hash) if listeners_count: logging.debug(" ---> ~FMPublished to %s subscribers" % (listeners_count)) except redis.ConnectionError: @@ -1970,178 +2181,191 @@ def publish_update_to_subscribers(self): def comments_with_author(self): comments = { - 'id': self.id, - 'user_id': self.user_id, - 'comments': self.comments, - 'shared_date': relative_timesince(self.shared_date), - 'date': self.shared_date, - 'replies': [reply.canonical() for reply in self.replies], - 'liking_users': self.liking_users and list(self.liking_users), - 'source_user_id': self.source_user_id, + "id": self.id, + "user_id": self.user_id, + "comments": self.comments, + "shared_date": relative_timesince(self.shared_date), + "date": self.shared_date, + "replies": [reply.canonical() for reply in self.replies], + "liking_users": self.liking_users and list(self.liking_users), + "source_user_id": self.source_user_id, } return comments - + def comment_with_author_and_profiles(self): comment = self.comments_with_author() - profile_user_ids = set([comment['user_id']]) - reply_user_ids = [reply['user_id'] for reply in comment['replies']] + profile_user_ids = set([comment["user_id"]]) + reply_user_ids = [reply["user_id"] for reply in comment["replies"]] profile_user_ids = profile_user_ids.union(reply_user_ids) - profile_user_ids = profile_user_ids.union(comment['liking_users']) - if comment['source_user_id']: - profile_user_ids.add(comment['source_user_id']) + profile_user_ids = profile_user_ids.union(comment["liking_users"]) + if comment["source_user_id"]: + profile_user_ids.add(comment["source_user_id"]) profiles = 
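sync_redis_story() above mirrors each blurblog into a plain set ("B:<user_id>") and a sorted set ("zB:<user_id>") scored by share time, both on a rolling expiry. Reading recent shares back out of the sorted set, as a sketch:

    # Query the "zB:<user_id>" sorted set written above: members are
    # "<feed_id>:<guid_hash>" strings, scores are epoch seconds.
    import time
    import redis

    r = redis.Redis(host="localhost", port=6379)  # assumed host

    def recent_shares(user_id, days=7):
        since = time.time() - days * 24 * 60 * 60
        # Members whose score (share time) falls within the window.
        return r.zrangebyscore("zB:%s" % user_id, since, "+inf")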
MSocialProfile.objects.filter(user_id__in=list(profile_user_ids)) profiles = [profile.canonical(compact=True) for profile in profiles] return comment, profiles - + @classmethod def stories_with_comments_and_profiles(cls, stories, user_id, check_all=False): r = redis.Redis(connection_pool=settings.REDIS_POOL) friend_key = "F:%s:F" % (user_id) profile_user_ids = set() - for story in stories: - story['friend_comments'] = [] - story['friend_shares'] = [] - story['public_comments'] = [] - story['reply_count'] = 0 - if check_all or story['comment_count']: - comment_key = "C:%s:%s" % (story['story_feed_id'], story['guid_hash']) - story['comment_count'] = r.scard(comment_key) + for story in stories: + story["friend_comments"] = [] + story["friend_shares"] = [] + story["public_comments"] = [] + story["reply_count"] = 0 + if check_all or story["comment_count"]: + comment_key = "C:%s:%s" % (story["story_feed_id"], story["guid_hash"]) + story["comment_count"] = r.scard(comment_key) friends_with_comments = [int(f) for f in r.sinter(comment_key, friend_key)] sharer_user_ids = [int(f) for f in r.smembers(comment_key)] shared_stories = [] if sharer_user_ids: params = { - 'story_hash': story['story_hash'], - 'user_id__in': sharer_user_ids, + "story_hash": story["story_hash"], + "user_id__in": sharer_user_ids, } - if 'story_db_id' in params: - params.pop('story_db_id') - shared_stories = cls.objects.filter(**params)\ - .hint([('story_hash', 1)]) + if "story_db_id" in params: + params.pop("story_db_id") + shared_stories = cls.objects.filter(**params).hint([("story_hash", 1)]) for shared_story in shared_stories: comments = shared_story.comments_with_author() - story['reply_count'] += len(comments['replies']) + story["reply_count"] += len(comments["replies"]) if shared_story.user_id in friends_with_comments: - story['friend_comments'].append(comments) + story["friend_comments"].append(comments) else: - story['public_comments'].append(comments) - if comments.get('source_user_id'): - profile_user_ids.add(comments['source_user_id']) - if comments.get('liking_users'): - profile_user_ids = profile_user_ids.union(comments['liking_users']) - all_comments = story['friend_comments'] + story['public_comments'] - profile_user_ids = profile_user_ids.union([reply['user_id'] - for c in all_comments - for reply in c['replies']]) - if story.get('source_user_id'): - profile_user_ids.add(story['source_user_id']) - story['comment_count_friends'] = len(friends_with_comments) - story['comment_count_public'] = story['comment_count'] - len(friends_with_comments) - - if check_all or story['share_count']: - share_key = "S:%s:%s" % (story['story_feed_id'], story['guid_hash']) - story['share_count'] = r.scard(share_key) + story["public_comments"].append(comments) + if comments.get("source_user_id"): + profile_user_ids.add(comments["source_user_id"]) + if comments.get("liking_users"): + profile_user_ids = profile_user_ids.union(comments["liking_users"]) + all_comments = story["friend_comments"] + story["public_comments"] + profile_user_ids = profile_user_ids.union( + [reply["user_id"] for c in all_comments for reply in c["replies"]] + ) + if story.get("source_user_id"): + profile_user_ids.add(story["source_user_id"]) + story["comment_count_friends"] = len(friends_with_comments) + story["comment_count_public"] = story["comment_count"] - len(friends_with_comments) + + if check_all or story["share_count"]: + share_key = "S:%s:%s" % (story["story_feed_id"], story["guid_hash"]) + story["share_count"] = r.scard(share_key) friends_with_shares = 
[int(f) for f in r.sinter(share_key, friend_key)] nonfriend_user_ids = [int(f) for f in r.sdiff(share_key, friend_key)] profile_user_ids.update(nonfriend_user_ids) profile_user_ids.update(friends_with_shares) - story['commented_by_public'] = [c['user_id'] for c in story['public_comments']] - story['commented_by_friends'] = [c['user_id'] for c in story['friend_comments']] - story['shared_by_public'] = list(set(nonfriend_user_ids) - - set(story['commented_by_public'])) - story['shared_by_friends'] = list(set(friends_with_shares) - - set(story['commented_by_friends'])) - story['share_count_public'] = story['share_count'] - len(friends_with_shares) - story['share_count_friends'] = len(friends_with_shares) - story['friend_user_ids'] = list(set(story['commented_by_friends'] + story['shared_by_friends'])) - story['public_user_ids'] = list(set(story['commented_by_public'] + story['shared_by_public'])) - if not story['share_user_ids']: - story['share_user_ids'] = story['friend_user_ids'] + story['public_user_ids'] - if story.get('source_user_id'): - profile_user_ids.add(story['source_user_id']) + story["commented_by_public"] = [c["user_id"] for c in story["public_comments"]] + story["commented_by_friends"] = [c["user_id"] for c in story["friend_comments"]] + story["shared_by_public"] = list(set(nonfriend_user_ids) - set(story["commented_by_public"])) + story["shared_by_friends"] = list( + set(friends_with_shares) - set(story["commented_by_friends"]) + ) + story["share_count_public"] = story["share_count"] - len(friends_with_shares) + story["share_count_friends"] = len(friends_with_shares) + story["friend_user_ids"] = list( + set(story["commented_by_friends"] + story["shared_by_friends"]) + ) + story["public_user_ids"] = list(set(story["commented_by_public"] + story["shared_by_public"])) + if not story["share_user_ids"]: + story["share_user_ids"] = story["friend_user_ids"] + story["public_user_ids"] + if story.get("source_user_id"): + profile_user_ids.add(story["source_user_id"]) shared_stories = [] - if story['shared_by_friends']: + if story["shared_by_friends"]: params = { - 'story_hash': story['story_hash'], - 'user_id__in': story['shared_by_friends'], + "story_hash": story["story_hash"], + "user_id__in": story["shared_by_friends"], } - shared_stories = cls.objects.filter(**params)\ - .hint([('story_hash', 1)]) + shared_stories = cls.objects.filter(**params).hint([("story_hash", 1)]) for shared_story in shared_stories: comments = shared_story.comments_with_author() - story['reply_count'] += len(comments['replies']) - story['friend_shares'].append(comments) - profile_user_ids = profile_user_ids.union([reply['user_id'] - for reply in comments['replies']]) - if comments.get('source_user_id'): - profile_user_ids.add(comments['source_user_id']) - if comments.get('liking_users'): - profile_user_ids = profile_user_ids.union(comments['liking_users']) - + story["reply_count"] += len(comments["replies"]) + story["friend_shares"].append(comments) + profile_user_ids = profile_user_ids.union( + [reply["user_id"] for reply in comments["replies"]] + ) + if comments.get("source_user_id"): + profile_user_ids.add(comments["source_user_id"]) + if comments.get("liking_users"): + profile_user_ids = profile_user_ids.union(comments["liking_users"]) + profiles = MSocialProfile.objects.filter(user_id__in=list(profile_user_ids)) - + # Toss public comments by private profiles and muted users - profiles_dict = dict((profile['user_id'], profile) for profile in profiles) + profiles_dict = dict((profile["user_id"], profile) 
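The friend/public split above is plain Redis set algebra: SINTER of the sharer set against the follower set yields friends, SDIFF yields everyone else. In isolation:

    # Sketch of the partition used above; key names follow the code
    # ("S:<feed>:<guid>" holds sharer ids, "F:<user>:F" holds followed ids).
    import redis

    r = redis.Redis(host="localhost", port=6379)  # assumed host

    def partition_sharers(user_id, feed_id, guid_hash):
        share_key = "S:%s:%s" % (feed_id, guid_hash)
        friend_key = "F:%s:F" % user_id
        friends = {int(f) for f in r.sinter(share_key, friend_key)}
        public = {int(f) for f in r.sdiff(share_key, friend_key)}
        return friends, public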
for profile in profiles) for story in stories: - commented_by_public = story.get('commented_by_public') or [c['user_id'] for c in story['public_comments']] + commented_by_public = story.get("commented_by_public") or [ + c["user_id"] for c in story["public_comments"] + ] for comment_user_id in commented_by_public: private = profiles_dict[comment_user_id].private muted = user_id in profiles_dict[comment_user_id].muted_by_user_ids if private or muted: - story['public_comments'] = [c for c in story['public_comments'] if c['user_id'] != comment_user_id] - story['comment_count_public'] -= 1 + story["public_comments"] = [ + c for c in story["public_comments"] if c["user_id"] != comment_user_id + ] + story["comment_count_public"] -= 1 profiles = [profile.canonical(compact=True) for profile in profiles] - + return stories, profiles - + @staticmethod def attach_users_to_stories(stories, profiles): - profiles = dict([(p['user_id'], p) for p in profiles]) + profiles = dict([(p["user_id"], p) for p in profiles]) for s, story in enumerate(stories): - for u, user_id in enumerate(story['shared_by_friends']): - if user_id not in profiles: continue - stories[s]['shared_by_friends'][u] = profiles[user_id] - for u, user_id in enumerate(story['shared_by_public']): - if user_id not in profiles: continue - stories[s]['shared_by_public'][u] = profiles[user_id] - for comment_set in ['friend_comments', 'public_comments', 'friend_shares']: + for u, user_id in enumerate(story["shared_by_friends"]): + if user_id not in profiles: + continue + stories[s]["shared_by_friends"][u] = profiles[user_id] + for u, user_id in enumerate(story["shared_by_public"]): + if user_id not in profiles: + continue + stories[s]["shared_by_public"][u] = profiles[user_id] + for comment_set in ["friend_comments", "public_comments", "friend_shares"]: for c, comment in enumerate(story[comment_set]): - if comment['user_id'] not in profiles: continue - stories[s][comment_set][c]['user'] = profiles[comment['user_id']] - if comment['source_user_id'] and comment['source_user_id'] in profiles: - stories[s][comment_set][c]['source_user'] = profiles[comment['source_user_id']] - for r, reply in enumerate(comment['replies']): - if reply['user_id'] not in profiles: continue - stories[s][comment_set][c]['replies'][r]['user'] = profiles[reply['user_id']] - stories[s][comment_set][c]['liking_user_ids'] = list(comment['liking_users']) - for u, user_id in enumerate(comment['liking_users']): - if user_id not in profiles: continue - stories[s][comment_set][c]['liking_users'][u] = profiles[user_id] + if comment["user_id"] not in profiles: + continue + stories[s][comment_set][c]["user"] = profiles[comment["user_id"]] + if comment["source_user_id"] and comment["source_user_id"] in profiles: + stories[s][comment_set][c]["source_user"] = profiles[comment["source_user_id"]] + for r, reply in enumerate(comment["replies"]): + if reply["user_id"] not in profiles: + continue + stories[s][comment_set][c]["replies"][r]["user"] = profiles[reply["user_id"]] + stories[s][comment_set][c]["liking_user_ids"] = list(comment["liking_users"]) + for u, user_id in enumerate(comment["liking_users"]): + if user_id not in profiles: + continue + stories[s][comment_set][c]["liking_users"][u] = profiles[user_id] return stories - + @staticmethod def attach_users_to_comment(comment, profiles): - profiles = dict([(p['user_id'], p) for p in profiles]) + profiles = dict([(p["user_id"], p) for p in profiles]) - if comment['user_id'] not in profiles: return comment - comment['user'] = 
profiles[comment['user_id']] + if comment["user_id"] not in profiles: + return comment + comment["user"] = profiles[comment["user_id"]] - if comment['source_user_id']: - comment['source_user'] = profiles[comment['source_user_id']] + if comment["source_user_id"]: + comment["source_user"] = profiles[comment["source_user_id"]] - for r, reply in enumerate(comment['replies']): - if reply['user_id'] not in profiles: continue - comment['replies'][r]['user'] = profiles[reply['user_id']] - comment['liking_user_ids'] = list(comment['liking_users']) - for u, user_id in enumerate(comment['liking_users']): - if user_id not in profiles: continue - comment['liking_users'][u] = profiles[user_id] + for r, reply in enumerate(comment["replies"]): + if reply["user_id"] not in profiles: + continue + comment["replies"][r]["user"] = profiles[reply["user_id"]] + comment["liking_user_ids"] = list(comment["liking_users"]) + for u, user_id in enumerate(comment["liking_users"]): + if user_id not in profiles: + continue + comment["liking_users"][u] = profiles[user_id] return comment - + def add_liking_user(self, user_id): if user_id not in self.liking_users: self.liking_users.append(user_id) @@ -2151,15 +2375,11 @@ def remove_liking_user(self, user_id): if user_id in self.liking_users: self.liking_users.remove(user_id) self.save() - + def blurblog_permalink(self): profile = MSocialProfile.get_user(self.user_id) - return "%sstory/%s/%s" % ( - profile.blurblog_url, - slugify(self.story_title)[:20], - self.story_hash - ) - + return "%sstory/%s/%s" % (profile.blurblog_url, slugify(self.story_title)[:20], self.story_hash) + def generate_post_to_service_message(self, truncate=None, include_url=True): message = strip_tags(self.comments) if not message or len(message) < 1: @@ -2178,55 +2398,56 @@ def generate_post_to_service_message(self, truncate=None, include_url=True): if truncate: message = truncate_chars(message, truncate - 24) message += " " + self.blurblog_permalink() - + return message - + def post_to_service(self, service): user = User.objects.get(pk=self.user_id) - + if service in self.posted_to_services: logging.user(user, "~BM~FRAlready posted to %s." 
% (service)) return - - posts_last_hour = MSharedStory.objects.filter(user_id=self.user_id, - posted_to_services__contains=service, - shared_date__gte=datetime.datetime.now() - - datetime.timedelta(hours=1)).count() + + posts_last_hour = MSharedStory.objects.filter( + user_id=self.user_id, + posted_to_services__contains=service, + shared_date__gte=datetime.datetime.now() - datetime.timedelta(hours=1), + ).count() if posts_last_hour >= 3: logging.user(user, "~BM~FRPosted to %s > 3 times in past hour" % service) return - + posted = False social_service = MSocialServices.objects.get(user_id=self.user_id) - + message = self.generate_post_to_service_message() logging.user(user, "~BM~FGPosting to %s: ~SB%s" % (service, message)) - - if service == 'twitter': + + if service == "twitter": posted = social_service.post_to_twitter(self) - elif service == 'facebook': + elif service == "facebook": posted = social_service.post_to_facebook(self) - + if posted: self.posted_to_services.append(service) self.save() - + def notify_user_ids(self, include_parent=True): user_ids = set() for reply in self.replies: if reply.user_id not in self.mute_email_users: user_ids.add(reply.user_id) - + if include_parent and self.user_id not in self.mute_email_users: user_ids.add(self.user_id) - + return list(user_ids) - + def reply_for_id(self, reply_id): for reply in self.replies: if reply.reply_id == reply_id: return reply - + def send_emails_for_new_reply(self, reply_id): if reply_id in self.emailed_replies: logging.debug(" ***> Already sent reply email: %s on %s" % (reply_id, self)) @@ -2236,7 +2457,7 @@ def send_emails_for_new_reply(self, reply_id): if not reply: logging.debug(" ***> Reply doesn't exist: %s on %s" % (reply_id, self)) return - + notify_user_ids = self.notify_user_ids() if reply.user_id in notify_user_ids: notify_user_ids.remove(reply.user_id) @@ -2246,15 +2467,15 @@ def send_emails_for_new_reply(self, reply_id): story_feed = Feed.get_by_id(self.story_feed_id) comment = self.comments_with_author() - profile_user_ids = set([comment['user_id']]) - reply_user_ids = list(r['user_id'] for r in comment['replies']) + profile_user_ids = set([comment["user_id"]]) + reply_user_ids = list(r["user_id"] for r in comment["replies"]) profile_user_ids = profile_user_ids.union(reply_user_ids) if self.source_user_id: profile_user_ids.add(self.source_user_id) profiles = MSocialProfile.objects.filter(user_id__in=list(profile_user_ids)) profiles = [profile.canonical(compact=True) for profile in profiles] comment = MSharedStory.attach_users_to_comment(comment, profiles) - + for user_id in notify_user_ids: user = User.objects.get(pk=user_id) @@ -2264,170 +2485,190 @@ def send_emails_for_new_reply(self, reply_id): elif not user.profile.send_emails: logging.user(user, "~FMDisabled emails, skipping.") continue - + mute_url = "http://%s%s" % ( Site.objects.get_current().domain, - reverse('social-mute-story', kwargs={ - 'secret_token': user.profile.secret_token, - 'shared_story_id': self.id, - }) + reverse( + "social-mute-story", + kwargs={ + "secret_token": user.profile.secret_token, + "shared_story_id": self.id, + }, + ), ) data = { - 'reply_user_profile': reply_user_profile, - 'comment': comment, - 'shared_story': self, - 'story_feed': story_feed, - 'mute_url': mute_url, + "reply_user_profile": reply_user_profile, + "comment": comment, + "shared_story": self, + "story_feed": story_feed, + "mute_url": mute_url, } - story_title = self.decoded_story_title.replace('\n', ' ') - - text = render_to_string('mail/email_reply.txt', data) - 
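post_to_service() above throttles cross-posting: once three shares in the past hour have already gone to a service, the next one is dropped. The guard in isolation:

    # Sketch of the per-service hourly cap in post_to_service(); history is a
    # list of (service, posted_at) tuples standing in for the Mongo query.
    import datetime

    def allow_post(history, service, per_hour=3):
        hour_ago = datetime.datetime.now() - datetime.timedelta(hours=1)
        recent = [s for s, at in history if s == service and at >= hour_ago]
        return len(recent) < per_hour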
html = pynliner.fromString(render_to_string('mail/email_reply.xhtml', data)) - subject = "%s replied to you on \"%s\" on NewsBlur" % (reply_user.username, story_title) - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (user.username, user.email)]) + story_title = self.decoded_story_title.replace("\n", " ") + + text = render_to_string("mail/email_reply.txt", data) + html = pynliner.fromString(render_to_string("mail/email_reply.xhtml", data)) + subject = '%s replied to you on "%s" on NewsBlur' % (reply_user.username, story_title) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (user.username, user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() sent_emails += 1 - - logging.user(reply_user, "~BB~FM~SBSending %s/%s email%s for new reply: %s" % ( - sent_emails, len(notify_user_ids), - '' if len(notify_user_ids) == 1 else 's', - self.decoded_story_title[:30])) - + + logging.user( + reply_user, + "~BB~FM~SBSending %s/%s email%s for new reply: %s" + % ( + sent_emails, + len(notify_user_ids), + "" if len(notify_user_ids) == 1 else "s", + self.decoded_story_title[:30], + ), + ) + self.emailed_replies.append(reply.reply_id) self.save() - + def send_email_for_reshare(self): if self.emailed_reshare: logging.debug(" ***> Already sent reply email: %s" % self) return - + reshare_user = User.objects.get(pk=self.user_id) reshare_user_profile = MSocialProfile.get_user(self.user_id) original_user = User.objects.get(pk=self.source_user_id) - original_shared_story = MSharedStory.objects.get(user_id=self.source_user_id, - story_guid=self.story_guid) - + original_shared_story = MSharedStory.objects.get( + user_id=self.source_user_id, story_guid=self.story_guid + ) + if not original_user.email or not original_user.profile.send_emails: if not original_user.email: logging.user(original_user, "~FMNo email to send to, skipping.") elif not original_user.profile.send_emails: logging.user(original_user, "~FMDisabled emails, skipping.") return - + story_feed = Feed.get_by_id(self.story_feed_id) comment = self.comments_with_author() - profile_user_ids = set([comment['user_id']]) - reply_user_ids = [reply['user_id'] for reply in comment['replies']] + profile_user_ids = set([comment["user_id"]]) + reply_user_ids = [reply["user_id"] for reply in comment["replies"]] profile_user_ids = profile_user_ids.union(reply_user_ids) if self.source_user_id: profile_user_ids.add(self.source_user_id) profiles = MSocialProfile.objects.filter(user_id__in=list(profile_user_ids)) profiles = [profile.canonical(compact=True) for profile in profiles] comment = MSharedStory.attach_users_to_comment(comment, profiles) - + mute_url = "http://%s%s" % ( Site.objects.get_current().domain, - reverse('social-mute-story', kwargs={ - 'secret_token': original_user.profile.secret_token, - 'shared_story_id': original_shared_story.id, - }) + reverse( + "social-mute-story", + kwargs={ + "secret_token": original_user.profile.secret_token, + "shared_story_id": original_shared_story.id, + }, + ), ) data = { - 'comment': comment, - 'shared_story': self, - 'reshare_user_profile': reshare_user_profile, - 'original_shared_story': original_shared_story, - 'story_feed': story_feed, - 'mute_url': mute_url, + "comment": comment, + "shared_story": self, + "reshare_user_profile": reshare_user_profile, + "original_shared_story": original_shared_story, + "story_feed": story_feed, + "mute_url": mute_url, } - story_title = 
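The reply notification above renders a plain-text body and an HTML body (styles inlined through pynliner) and ships both as one multipart message. A minimal Django sketch of that shape; template names and addresses are placeholders, and configured Django mail settings are assumed:

    # Text + HTML email, as built in send_emails_for_new_reply().
    from django.core.mail import EmailMultiAlternatives
    from django.template.loader import render_to_string
    import pynliner

    def send_notification(data, subject, to_address, from_address):
        text = render_to_string("mail/notification.txt", data)
        # Inline the stylesheet so the HTML survives mail clients.
        html = pynliner.fromString(render_to_string("mail/notification.xhtml", data))
        msg = EmailMultiAlternatives(subject, text, from_email=from_address, to=[to_address])
        msg.attach_alternative(html, "text/html")
        msg.send()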
self.decoded_story_title.replace('\n', ' ') - - text = render_to_string('mail/email_reshare.txt', data) - html = pynliner.fromString(render_to_string('mail/email_reshare.xhtml', data)) - subject = "%s re-shared \"%s\" from you on NewsBlur" % (reshare_user.username, story_title) - msg = EmailMultiAlternatives(subject, text, - from_email='NewsBlur <%s>' % settings.HELLO_EMAIL, - to=['%s <%s>' % (original_user.username, original_user.email)]) + story_title = self.decoded_story_title.replace("\n", " ") + + text = render_to_string("mail/email_reshare.txt", data) + html = pynliner.fromString(render_to_string("mail/email_reshare.xhtml", data)) + subject = '%s re-shared "%s" from you on NewsBlur' % (reshare_user.username, story_title) + msg = EmailMultiAlternatives( + subject, + text, + from_email="NewsBlur <%s>" % settings.HELLO_EMAIL, + to=["%s <%s>" % (original_user.username, original_user.email)], + ) msg.attach_alternative(html, "text/html") msg.send() - + self.emailed_reshare = True self.save() - - logging.user(reshare_user, "~BB~FM~SBSending %s email for story re-share: %s" % ( - original_user.username, - self.decoded_story_title[:30])) - + + logging.user( + reshare_user, + "~BB~FM~SBSending %s email for story re-share: %s" + % (original_user.username, self.decoded_story_title[:30]), + ) + def extract_image_urls(self, force=False): if not self.story_content_z: return if self.image_urls and not force: return - + soup = BeautifulSoup(zlib.decompress(self.story_content_z), features="lxml") - image_sources = [img.get('src') for img in soup.findAll('img') if img and img.get('src')] + image_sources = [img.get("src") for img in soup.findAll("img") if img and img.get("src")] if len(image_sources) > 0: self.image_urls = image_sources max_length = MSharedStory.image_urls.field.max_length - while len(''.join(self.image_urls)) > max_length: + while len("".join(self.image_urls)) > max_length: if len(self.image_urls) <= 1: - self.image_urls[0] = self.image_urls[0][:max_length-1] + self.image_urls[0] = self.image_urls[0][: max_length - 1] break else: self.image_urls.pop() self.save() - + def calculate_image_sizes(self, force=False): if not self.story_content_z: return - + if not force and self.image_count: return self.image_sizes - + headers = { - 'User-Agent': 'NewsBlur Image Fetcher - %s' % ( - settings.NEWSBLUR_URL - ), + "User-Agent": "NewsBlur Image Fetcher - %s" % (settings.NEWSBLUR_URL), } - + self.extract_image_urls() image_sizes = [] - + for image_source in self.image_urls[:10]: if any(ignore in image_source for ignore in IGNORE_IMAGE_SOURCES): continue width, height = ImageOps.image_size(image_source, headers=headers) # if width <= 16 or height <= 16: # continue - image_sizes.append({'src': image_source, 'size': (width, height)}) - + image_sizes.append({"src": image_source, "size": (width, height)}) + if image_sizes: - image_sizes = sorted(image_sizes, key=lambda i: i['size'][0] * i['size'][1], - reverse=True) + image_sizes = sorted(image_sizes, key=lambda i: i["size"][0] * i["size"][1], reverse=True) self.image_sizes = image_sizes self.image_count = len(image_sizes) self.save() - - logging.debug(" ---> ~SN~FGFetched image sizes on shared story: ~SB%s/%s images" % - (self.image_count, len(self.image_urls))) - + + logging.debug( + " ---> ~SN~FGFetched image sizes on shared story: ~SB%s/%s images" + % (self.image_count, len(self.image_urls)) + ) + return image_sizes - + def fetch_original_text(self, force=False, request=None, debug=False): original_text_z = self.original_text_z feed = 
Feed.get_by_id(self.story_feed_id) - + if not original_text_z or force: ti = TextImporter(self, feed=feed, request=request, debug=False) original_text = ti.fetch() else: logging.user(request, "~FYFetching ~FGoriginal~FY story text, ~SBfound.") original_text = zlib.decompress(original_text_z) - + return original_text def fetch_original_page(self, force=False, request=None, debug=False): @@ -2438,62 +2679,61 @@ def fetch_original_page(self, force=False, request=None, debug=False): else: logging.user(request, "~FYFetching ~FGoriginal~FY story page, ~SBfound.") original_page = zlib.decompress(self.original_page_z) - + return original_page + class MSocialServices(mongo.Document): - user_id = mongo.IntField() - autofollow = mongo.BooleanField(default=True) - twitter_uid = mongo.StringField() - twitter_access_key = mongo.StringField() + user_id = mongo.IntField() + autofollow = mongo.BooleanField(default=True) + twitter_uid = mongo.StringField() + twitter_access_key = mongo.StringField() twitter_access_secret = mongo.StringField() - twitter_friend_ids = mongo.ListField(mongo.StringField()) - twitter_picture_url = mongo.StringField() - twitter_username = mongo.StringField() - twitter_refresh_date = mongo.DateTimeField() - facebook_uid = mongo.StringField() + twitter_friend_ids = mongo.ListField(mongo.StringField()) + twitter_picture_url = mongo.StringField() + twitter_username = mongo.StringField() + twitter_refresh_date = mongo.DateTimeField() + facebook_uid = mongo.StringField() facebook_access_token = mongo.StringField() - facebook_friend_ids = mongo.ListField(mongo.StringField()) - facebook_picture_url = mongo.StringField() + facebook_friend_ids = mongo.ListField(mongo.StringField()) + facebook_picture_url = mongo.StringField() facebook_refresh_date = mongo.DateTimeField() - upload_picture_url = mongo.StringField() - syncing_twitter = mongo.BooleanField(default=False) - syncing_facebook = mongo.BooleanField(default=False) - + upload_picture_url = mongo.StringField() + syncing_twitter = mongo.BooleanField(default=False) + syncing_facebook = mongo.BooleanField(default=False) + meta = { - 'collection': 'social_services', - 'indexes': ['user_id', 'twitter_friend_ids', 'facebook_friend_ids', 'twitter_uid', 'facebook_uid'], - 'allow_inheritance': False, - 'strict': False, + "collection": "social_services", + "indexes": ["user_id", "twitter_friend_ids", "facebook_friend_ids", "twitter_uid", "facebook_uid"], + "allow_inheritance": False, + "strict": False, } - + def __str__(self): user = User.objects.get(pk=self.user_id) return "%s (Twitter: %s, FB: %s)" % (user.username, self.twitter_uid, self.facebook_uid) - + def canonical(self): user = User.objects.get(pk=self.user_id) return { - 'twitter': { - 'twitter_username': self.twitter_username, - 'twitter_picture_url': self.twitter_picture_url, - 'twitter_uid': self.twitter_uid, - 'syncing': self.syncing_twitter, + "twitter": { + "twitter_username": self.twitter_username, + "twitter_picture_url": self.twitter_picture_url, + "twitter_uid": self.twitter_uid, + "syncing": self.syncing_twitter, }, - 'facebook': { - 'facebook_uid': self.facebook_uid, - 'facebook_picture_url': self.facebook_picture_url, - 'syncing': self.syncing_facebook, + "facebook": { + "facebook_uid": self.facebook_uid, + "facebook_picture_url": self.facebook_picture_url, + "syncing": self.syncing_facebook, }, - 'gravatar': { - 'gravatar_picture_url': "https://www.gravatar.com/avatar/" + \ - hashlib.md5(user.email.lower().encode('utf-8')).hexdigest() + "gravatar": { + 
"gravatar_picture_url": "https://www.gravatar.com/avatar/" + + hashlib.md5(user.email.lower().encode("utf-8")).hexdigest() }, - 'upload': { - 'upload_picture_url': self.upload_picture_url - } + "upload": {"upload_picture_url": self.upload_picture_url}, } - + @classmethod def get_user(cls, user_id): try: @@ -2513,26 +2753,26 @@ def get_user(cls, user_id): if created: profile.save() return profile - + @classmethod def profile(cls, user_id): profile = cls.get_user(user_id=user_id) return profile.canonical() - + def save_uploaded_photo(self, photo): photo_body = photo.read() filename = photo.name s3 = s3_utils.S3Store() image_name = s3.save_profile_picture(self.user_id, filename, photo_body) - if image_name: + if image_name: self.upload_picture_url = "https://s3.amazonaws.com/%s/avatars/%s/thumbnail_%s" % ( settings.S3_AVATARS_BUCKET_NAME, self.user_id, image_name, ) self.save() - + return image_name and self.upload_picture_url def twitter_api(self): @@ -2542,42 +2782,42 @@ def twitter_api(self): auth.set_access_token(self.twitter_access_key, self.twitter_access_secret) api = tweepy.API(auth) return api - + def facebook_api(self): graph = facebook.GraphAPI(access_token=self.facebook_access_token, version="3.1") return graph - + def sync_twitter_friends(self): user = User.objects.get(pk=self.user_id) logging.user(user, "~BG~FMTwitter import starting...") - + api = self.twitter_api() try: twitter_user = api.me() except tweepy.TweepError as e: api = None - + if not api: logging.user(user, "~BG~FMTwitter import ~SBfailed~SN: no api access.") self.syncing_twitter = False self.save() return - + self.twitter_picture_url = twitter_user.profile_image_url_https self.twitter_username = twitter_user.screen_name self.twitter_refreshed_date = datetime.datetime.utcnow() self.syncing_twitter = False self.save() - + profile = MSocialProfile.get_user(self.user_id) profile.location = profile.location or twitter_user.location profile.bio = profile.bio or twitter_user.description profile.website = profile.website or twitter_user.url profile.save() profile.count_follows() - + if not profile.photo_url or not profile.photo_service: - self.set_photo('twitter') + self.set_photo("twitter") try: friend_ids = list(str(friend.id) for friend in list(tweepy.Cursor(api.friends).items())) @@ -2588,17 +2828,17 @@ def sync_twitter_friends(self): logging.user(user, "~BG~FMTwitter import ~SBfailed~SN: no friend_ids.") self.twitter_friend_ids = friend_ids self.save() - + following = self.follow_twitter_friends() - + if not following: logging.user(user, "~BG~FMTwitter import finished.") - + def follow_twitter_friends(self): social_profile = MSocialProfile.get_user(self.user_id) following = [] followers = 0 - + if not self.autofollow: return following @@ -2609,7 +2849,7 @@ def follow_twitter_friends(self): socialsub = social_profile.follow_user(followee_user_id) if socialsub: following.append(followee_user_id) - + # Friends already on NewsBlur should follow back # following_users = MSocialServices.objects.filter(twitter_friend_ids__contains=self.twitter_uid) # for following_user in following_users: @@ -2617,16 +2857,20 @@ def follow_twitter_friends(self): # following_user_profile = MSocialProfile.get_user(following_user.user_id) # following_user_profile.follow_user(self.user_id, check_unfollowed=True) # followers += 1 - + user = User.objects.get(pk=self.user_id) - logging.user(user, "~BG~FMTwitter import: %s users, now following ~SB%s~SN with ~SB%s~SN follower-backs" % (len(self.twitter_friend_ids), len(following), followers)) - + 
logging.user( + user, + "~BG~FMTwitter import: %s users, now following ~SB%s~SN with ~SB%s~SN follower-backs" + % (len(self.twitter_friend_ids), len(following), followers), + ) + return following - + def sync_facebook_friends(self): user = User.objects.get(pk=self.user_id) logging.user(user, "~BG~FMFacebook import starting...") - + graph = self.facebook_api() if not graph: logging.user(user, "~BG~FMFacebook import ~SBfailed~SN: no api access.") @@ -2647,25 +2891,27 @@ def sync_facebook_friends(self): self.facebook_picture_url = "https://graph.facebook.com/%s/picture" % self.facebook_uid self.syncing_facebook = False self.save() - - facebook_user = graph.request('me', args={'fields':'website,about,location'}) + + facebook_user = graph.request("me", args={"fields": "website,about,location"}) profile = MSocialProfile.get_user(self.user_id) - profile.location = profile.location or (facebook_user.get('location') and facebook_user['location']['name']) - profile.bio = profile.bio or facebook_user.get('about') - if not profile.website and facebook_user.get('website'): - profile.website = facebook_user.get('website').split()[0] + profile.location = profile.location or ( + facebook_user.get("location") and facebook_user["location"]["name"] + ) + profile.bio = profile.bio or facebook_user.get("about") + if not profile.website and facebook_user.get("website"): + profile.website = facebook_user.get("website").split()[0] profile.save() profile.count_follows() if not profile.photo_url or not profile.photo_service: - self.set_photo('facebook') - + self.set_photo("facebook") + self.follow_facebook_friends() - + def follow_facebook_friends(self): social_profile = MSocialProfile.get_user(self.user_id) following = [] followers = 0 - + if not self.autofollow: return following @@ -2676,7 +2922,7 @@ def follow_facebook_friends(self): socialsub = social_profile.follow_user(followee_user_id) if socialsub: following.append(followee_user_id) - + # Friends already on NewsBlur should follow back # following_users = MSocialServices.objects.filter(facebook_friend_ids__contains=self.facebook_uid) # for following_user in following_users: @@ -2684,47 +2930,52 @@ def follow_facebook_friends(self): # following_user_profile = MSocialProfile.get_user(following_user.user_id) # following_user_profile.follow_user(self.user_id, check_unfollowed=True) # followers += 1 - + user = User.objects.get(pk=self.user_id) - logging.user(user, "~BG~FMFacebook import: %s users, now following ~SB%s~SN with ~SB%s~SN follower-backs" % (len(self.facebook_friend_ids), len(following), followers)) - + logging.user( + user, + "~BG~FMFacebook import: %s users, now following ~SB%s~SN with ~SB%s~SN follower-backs" + % (len(self.facebook_friend_ids), len(following), followers), + ) + return following - + def disconnect_twitter(self): self.syncing_twitter = False self.twitter_uid = None self.save() - + def disconnect_facebook(self): self.syncing_facebook = False self.facebook_uid = None self.save() - + def set_photo(self, service): profile = MSocialProfile.get_user(self.user_id) - if service == 'nothing': + if service == "nothing": service = None profile.photo_service = service if not service: profile.photo_url = None - elif service == 'twitter': + elif service == "twitter": profile.photo_url = self.twitter_picture_url - elif service == 'facebook': + elif service == "facebook": profile.photo_url = self.facebook_picture_url - elif service == 'upload': + elif service == "upload": profile.photo_url = self.upload_picture_url - elif service == 'gravatar': + 
elif service == "gravatar": user = User.objects.get(pk=self.user_id) - profile.photo_url = "https://www.gravatar.com/avatar/" + \ - hashlib.md5(user.email.encode('utf-8')).hexdigest() + profile.photo_url = ( + "https://www.gravatar.com/avatar/" + hashlib.md5(user.email.encode("utf-8")).hexdigest() + ) profile.save() return profile - + @classmethod def sync_all_twitter_photos(cls, days=14, everybody=False): if everybody: - sharers = [ss.user_id for ss in MSocialServices.objects.all().only('user_id')] + sharers = [ss.user_id for ss in MSocialServices.objects.all().only("user_id")] elif days: week_ago = datetime.datetime.now() - datetime.timedelta(days=days) shares = MSharedStory.objects.filter(shared_date__gte=week_ago) @@ -2736,7 +2987,8 @@ def sync_all_twitter_photos(cls, days=14, everybody=False): profile = MSocialProfile.objects.get(user_id=user_id) except MSocialProfile.DoesNotExist: continue - if not profile.photo_service == 'twitter': continue + if not profile.photo_service == "twitter": + continue ss = MSocialServices.objects.get(user_id=user_id) try: ss.sync_twitter_photo() @@ -2749,10 +3001,10 @@ def sync_twitter_photo(self): if profile.photo_service != "twitter": return - + user = User.objects.get(pk=self.user_id) logging.user(user, "~FCSyncing Twitter profile photo...") - + try: api = self.twitter_api() me = api.me() @@ -2764,12 +3016,12 @@ def sync_twitter_photo(self): self.twitter_picture_url = me.profile_image_url_https self.save() - self.set_photo('twitter') - + self.set_photo("twitter") + def post_to_twitter(self, shared_story): message = shared_story.generate_post_to_service_message(truncate=280) shared_story.calculate_image_sizes() - + try: api = self.twitter_api() filename = self.fetch_image_file_for_twitter(shared_story) @@ -2782,93 +3034,101 @@ def post_to_twitter(self, shared_story): user = User.objects.get(pk=self.user_id) logging.user(user, "~FRTwitter error: ~SB%s" % e) return - + return True - + def fetch_image_file_for_twitter(self, shared_story): - if not shared_story.image_urls: return + if not shared_story.image_urls: + return user = User.objects.get(pk=self.user_id) logging.user(user, "~FCFetching image for twitter: ~SB%s" % shared_story.image_urls[0]) - + url = shared_story.image_urls[0] image_filename = os.path.basename(urllib.parse.urlparse(url).path) req = requests.get(url, stream=True, timeout=10) filename = "/tmp/%s-%s" % (shared_story.story_hash, image_filename) - + if req.status_code == 200: f = open(filename, "wb") for chunk in req: f.write(chunk) f.close() - + return filename - + def post_to_facebook(self, shared_story): message = shared_story.generate_post_to_service_message(include_url=False) shared_story.calculate_image_sizes() content = zlib.decompress(shared_story.story_content_z)[:1024] - + try: api = self.facebook_api() # api.put_wall_post(message=message) - api.put_object('me', '%s:share' % settings.FACEBOOK_NAMESPACE, - link=shared_story.blurblog_permalink(), - type="link", - name=shared_story.decoded_story_title, - description=content, - website=shared_story.blurblog_permalink(), - message=message, - ) + api.put_object( + "me", + "%s:share" % settings.FACEBOOK_NAMESPACE, + link=shared_story.blurblog_permalink(), + type="link", + name=shared_story.decoded_story_title, + description=content, + website=shared_story.blurblog_permalink(), + message=message, + ) except facebook.GraphAPIError as e: logging.debug("---> ~SN~FMFacebook posting error, disconnecting: ~SB~FR%s" % e) self.disconnect_facebook() return - + return True - + class 
MInteraction(mongo.Document): - user_id = mongo.IntField() - date = mongo.DateTimeField(default=datetime.datetime.now) - category = mongo.StringField() - title = mongo.StringField() - content = mongo.StringField() + user_id = mongo.IntField() + date = mongo.DateTimeField(default=datetime.datetime.now) + category = mongo.StringField() + title = mongo.StringField() + content = mongo.StringField() with_user_id = mongo.IntField() - feed_id = mongo.DynamicField() - story_feed_id= mongo.IntField() - content_id = mongo.StringField() - + feed_id = mongo.DynamicField() + story_feed_id = mongo.IntField() + content_id = mongo.StringField() + meta = { - 'collection': 'interactions', - 'indexes': [('user_id', '-date'), 'category', 'with_user_id'], - 'allow_inheritance': False, - 'ordering': ['-date'], + "collection": "interactions", + "indexes": [("user_id", "-date"), "category", "with_user_id"], + "allow_inheritance": False, + "ordering": ["-date"], } - + def __str__(self): user = User.objects.get(pk=self.user_id) with_user = self.with_user_id and User.objects.get(pk=self.with_user_id) - return "<%s> %s on %s: %s - %s" % (user.username, with_user and with_user.username, self.date, - self.category, self.content and self.content[:20]) - + return "<%s> %s on %s: %s - %s" % ( + user.username, + with_user and with_user.username, + self.date, + self.category, + self.content and self.content[:20], + ) + def canonical(self): story_hash = None if self.story_feed_id: story_hash = MStory.ensure_story_hash(self.content_id, story_feed_id=self.story_feed_id) return { - 'date': self.date, - 'category': self.category, - 'title': self.title, - 'content': self.content, - 'with_user_id': self.with_user_id, - 'feed_id': self.feed_id, - 'story_feed_id': self.story_feed_id, - 'content_id': self.content_id, - 'story_hash': story_hash, + "date": self.date, + "category": self.category, + "title": self.title, + "content": self.content, + "with_user_id": self.with_user_id, + "feed_id": self.feed_id, + "story_feed_id": self.story_feed_id, + "content_id": self.content_id, + "story_hash": story_hash, } - + @classmethod def trim(cls, user_id, limit=100): user = User.objects.get(pk=user_id) @@ -2877,22 +3137,24 @@ def trim(cls, user_id, limit=100): if interaction_count == 0: interaction_count = cls.objects.filter(user_id=user_id).count() - logging.user(user, "~FBNot trimming interactions, only ~SB%s~SN interactions found" % interaction_count) + logging.user( + user, "~FBNot trimming interactions, only ~SB%s~SN interactions found" % interaction_count + ) return - + logging.user(user, "~FBTrimming ~SB%s~SN interactions..." 
% interaction_count) for interaction in interactions: interaction.delete() logging.user(user, "~FBDone trimming ~SB%s~SN interactions" % interaction_count) - + @classmethod def publish_update_to_subscribers(self, user_id): user = User.objects.get(pk=user_id) try: r = redis.Redis(connection_pool=settings.REDIS_POOL) - listeners_count = r.publish(user.username, 'interaction:new') + listeners_count = r.publish(user.username, "interaction:new") if listeners_count: logging.debug(" ---> ~FMPublished to %s subscribers" % (listeners_count)) except redis.ConnectionError: @@ -2904,74 +3166,85 @@ def user(cls, user_id, page=1, limit=None, categories=None): dashboard_date = user_profile.dashboard_date or user_profile.last_seen_on page = max(1, page) limit = int(limit) if limit else 4 - offset = (page-1) * limit - + offset = (page - 1) * limit + interactions_db = cls.objects.filter(user_id=user_id) if categories: interactions_db = interactions_db.filter(category__in=categories) - interactions_db = interactions_db[offset:offset+limit+1] - + interactions_db = interactions_db[offset : offset + limit + 1] + has_next_page = len(interactions_db) > limit - interactions_db = interactions_db[offset:offset+limit] + interactions_db = interactions_db[offset : offset + limit] with_user_ids = [i.with_user_id for i in interactions_db if i.with_user_id] - social_profiles = dict((p.user_id, p) for p in MSocialProfile.objects.filter(user_id__in=with_user_ids)) - + social_profiles = dict( + (p.user_id, p) for p in MSocialProfile.objects.filter(user_id__in=with_user_ids) + ) + interactions = [] for interaction_db in interactions_db: interaction = interaction_db.canonical() social_profile = social_profiles.get(interaction_db.with_user_id) if social_profile: - interaction['photo_url'] = social_profile.profile_photo_url - interaction['with_user'] = social_profiles.get(interaction_db.with_user_id) - interaction['time_since'] = relative_timesince(interaction_db.date) - interaction['date'] = interaction_db.date - interaction['is_new'] = interaction_db.date > dashboard_date + interaction["photo_url"] = social_profile.profile_photo_url + interaction["with_user"] = social_profiles.get(interaction_db.with_user_id) + interaction["time_since"] = relative_timesince(interaction_db.date) + interaction["date"] = interaction_db.date + interaction["is_new"] = interaction_db.date > dashboard_date interactions.append(interaction) return interactions, has_next_page - + @classmethod def user_unread_count(cls, user_id): user_profile = Profile.objects.get(user=user_id) dashboard_date = user_profile.dashboard_date or user_profile.last_seen_on - + interactions_count = cls.objects.filter(user_id=user_id, date__gte=dashboard_date).count() - + return interactions_count - + @classmethod def new_follow(cls, follower_user_id, followee_user_id): params = { - 'user_id': followee_user_id, - 'with_user_id': follower_user_id, - 'category': 'follow', + "user_id": followee_user_id, + "with_user_id": follower_user_id, + "category": "follow", } try: cls.objects.get(**params) except cls.DoesNotExist: cls.objects.create(**params) except cls.MultipleObjectsReturned: - dupes = cls.objects.filter(**params).order_by('-date') + dupes = cls.objects.filter(**params).order_by("-date") logging.debug(" ---> ~FRDeleting dupe follow interactions. %s found." 
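MInteraction.user() above pages with a limit-plus-one fetch: the extra row answers has_next_page without a second count query. The pattern on any sliceable, date-ordered sequence:

    # Limit-plus-one pagination, as in MInteraction.user().
    def paginate(items, page=1, limit=4):
        page = max(1, page)
        offset = (page - 1) * limit
        window = items[offset : offset + limit + 1]  # one extra row as probe
        has_next_page = len(window) > limit
        return window[:limit], has_next_page

    rows, more = paginate(list(range(10)), page=2, limit=4)
    assert rows == [4, 5, 6, 7] and more is True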
% dupes.count()) for dupe in dupes[1:]: dupe.delete() - + cls.publish_update_to_subscribers(followee_user_id) - + @classmethod - def new_comment_reply(cls, user_id, reply_user_id, reply_content, story_id, story_feed_id, story_title=None, original_message=None): + def new_comment_reply( + cls, + user_id, + reply_user_id, + reply_content, + story_id, + story_feed_id, + story_title=None, + original_message=None, + ): params = { - 'user_id': user_id, - 'with_user_id': reply_user_id, - 'category': 'comment_reply', - 'content': linkify(strip_tags(reply_content)), - 'feed_id': "social:%s" % user_id, - 'story_feed_id': story_feed_id, - 'title': story_title, - 'content_id': story_id, + "user_id": user_id, + "with_user_id": reply_user_id, + "category": "comment_reply", + "content": linkify(strip_tags(reply_content)), + "feed_id": "social:%s" % user_id, + "story_feed_id": story_feed_id, + "title": story_title, + "content_id": story_id, } if original_message: - params['content'] = original_message + params["content"] = original_message original = cls.objects.filter(**params).limit(1) if original: original = original[0] @@ -2982,55 +3255,69 @@ def new_comment_reply(cls, user_id, reply_user_id, reply_content, story_id, stor if not original_message: cls.objects.create(**params) - + cls.publish_update_to_subscribers(user_id) - + @classmethod def remove_comment_reply(cls, user_id, reply_user_id, reply_content, story_id, story_feed_id): params = { - 'user_id': user_id, - 'with_user_id': reply_user_id, - 'category': 'comment_reply', - 'content': linkify(strip_tags(reply_content)), - 'feed_id': "social:%s" % user_id, - 'story_feed_id': story_feed_id, - 'content_id': story_id, + "user_id": user_id, + "with_user_id": reply_user_id, + "category": "comment_reply", + "content": linkify(strip_tags(reply_content)), + "feed_id": "social:%s" % user_id, + "story_feed_id": story_feed_id, + "content_id": story_id, } original = cls.objects.filter(**params) original.delete() - + cls.publish_update_to_subscribers(user_id) - + @classmethod - def new_comment_like(cls, liking_user_id, comment_user_id, story_id, story_feed_id, story_title, comments): - params = dict(user_id=comment_user_id, - with_user_id=liking_user_id, - category="comment_like", - feed_id="social:%s" % comment_user_id, - story_feed_id=story_feed_id, - content_id=story_id) + def new_comment_like( + cls, liking_user_id, comment_user_id, story_id, story_feed_id, story_title, comments + ): + params = dict( + user_id=comment_user_id, + with_user_id=liking_user_id, + category="comment_like", + feed_id="social:%s" % comment_user_id, + story_feed_id=story_feed_id, + content_id=story_id, + ) try: cls.objects.get(**params) except cls.DoesNotExist: params.update(dict(title=story_title, content=comments)) cls.objects.create(**params) - + cls.publish_update_to_subscribers(comment_user_id) @classmethod - def new_reply_reply(cls, user_id, comment_user_id, reply_user_id, reply_content, story_id, story_feed_id, story_title=None, original_message=None): + def new_reply_reply( + cls, + user_id, + comment_user_id, + reply_user_id, + reply_content, + story_id, + story_feed_id, + story_title=None, + original_message=None, + ): params = { - 'user_id': user_id, - 'with_user_id': reply_user_id, - 'category': 'reply_reply', - 'content': linkify(strip_tags(reply_content)), - 'feed_id': "social:%s" % comment_user_id, - 'story_feed_id': story_feed_id, - 'title': story_title, - 'content_id': story_id, + "user_id": user_id, + "with_user_id": reply_user_id, + "category": "reply_reply", + 
"content": linkify(strip_tags(reply_content)), + "feed_id": "social:%s" % comment_user_id, + "story_feed_id": story_feed_id, + "title": story_title, + "content_id": story_id, } if original_message: - params['content'] = original_message + params["content"] = original_message original = cls.objects.filter(**params).limit(1) if original: original = original[0] @@ -3041,39 +3328,43 @@ def new_reply_reply(cls, user_id, comment_user_id, reply_user_id, reply_content, if not original_message: cls.objects.create(**params) - + cls.publish_update_to_subscribers(user_id) - + @classmethod - def remove_reply_reply(cls, user_id, comment_user_id, reply_user_id, reply_content, story_id, story_feed_id): + def remove_reply_reply( + cls, user_id, comment_user_id, reply_user_id, reply_content, story_id, story_feed_id + ): params = { - 'user_id': user_id, - 'with_user_id': reply_user_id, - 'category': 'reply_reply', - 'content': linkify(strip_tags(reply_content)), - 'feed_id': "social:%s" % comment_user_id, - 'story_feed_id': story_feed_id, - 'content_id': story_id, + "user_id": user_id, + "with_user_id": reply_user_id, + "category": "reply_reply", + "content": linkify(strip_tags(reply_content)), + "feed_id": "social:%s" % comment_user_id, + "story_feed_id": story_feed_id, + "content_id": story_id, } original = cls.objects.filter(**params) original.delete() - + cls.publish_update_to_subscribers(user_id) - + @classmethod - def new_reshared_story(cls, user_id, reshare_user_id, comments, story_title, story_feed_id, story_id, original_comments=None): + def new_reshared_story( + cls, user_id, reshare_user_id, comments, story_title, story_feed_id, story_id, original_comments=None + ): params = { - 'user_id': user_id, - 'with_user_id': reshare_user_id, - 'category': 'story_reshare', - 'content': comments, - 'title': story_title, - 'feed_id': "social:%s" % reshare_user_id, - 'story_feed_id': story_feed_id, - 'content_id': story_id, + "user_id": user_id, + "with_user_id": reshare_user_id, + "category": "story_reshare", + "content": comments, + "title": story_title, + "feed_id": "social:%s" % reshare_user_id, + "story_feed_id": story_feed_id, + "content_id": story_id, } if original_comments: - params['content'] = original_comments + params["content"] = original_comments original = cls.objects.filter(**params).limit(1) if original: interaction = original[0] @@ -3084,49 +3375,50 @@ def new_reshared_story(cls, user_id, reshare_user_id, comments, story_title, sto if not original_comments: cls.objects.create(**params) - + cls.publish_update_to_subscribers(user_id) + class MActivity(mongo.Document): - user_id = mongo.IntField() - date = mongo.DateTimeField(default=datetime.datetime.now) - category = mongo.StringField() - title = mongo.StringField() - content = mongo.StringField() + user_id = mongo.IntField() + date = mongo.DateTimeField(default=datetime.datetime.now) + category = mongo.StringField() + title = mongo.StringField() + content = mongo.StringField() with_user_id = mongo.IntField() - feed_id = mongo.DynamicField() - story_feed_id= mongo.IntField() - content_id = mongo.StringField() - + feed_id = mongo.DynamicField() + story_feed_id = mongo.IntField() + content_id = mongo.StringField() + meta = { - 'collection': 'activities', - 'indexes': [('user_id', '-date'), 'category', 'with_user_id'], - 'allow_inheritance': False, - 'ordering': ['-date'], + "collection": "activities", + "indexes": [("user_id", "-date"), "category", "with_user_id"], + "allow_inheritance": False, + "ordering": ["-date"], } - + def __str__(self): 
user = User.objects.get(pk=self.user_id) return "<%s> %s - %s" % (user.username, self.category, self.content and self.content[:20]) - + def canonical(self): story_hash = None if self.story_feed_id: story_hash = MStory.ensure_story_hash(self.content_id, story_feed_id=self.story_feed_id) return { - 'date': self.date, - 'category': self.category, - 'title': self.title, - 'content': self.content, - 'user_id': self.user_id, - 'with_user_id': self.with_user_id or self.user_id, - 'feed_id': self.feed_id or self.story_feed_id, - 'story_feed_id': self.story_feed_id or self.feed_id, - 'content_id': self.content_id, - 'story_hash': story_hash, + "date": self.date, + "category": self.category, + "title": self.title, + "content": self.content, + "user_id": self.user_id, + "with_user_id": self.with_user_id or self.user_id, + "feed_id": self.feed_id or self.story_feed_id, + "story_feed_id": self.story_feed_id or self.feed_id, + "content_id": self.content_id, + "story_hash": story_hash, } - + @classmethod def trim(cls, user_id, limit=100): user = User.objects.get(pk=user_id) @@ -3137,53 +3429,54 @@ def trim(cls, user_id, limit=100): activity_count = cls.objects.filter(user_id=user_id).count() logging.user(user, "~FBNot trimming activities, only ~SB%s~SN activities found" % activity_count) return - + logging.user(user, "~FBTrimming ~SB%s~SN activities..." % activity_count) for activity in activities: activity.delete() logging.user(user, "~FBDone trimming ~SB%s~SN activities" % activity_count) - + @classmethod def user(cls, user_id, page=1, limit=4, public=False, categories=None): user_profile = Profile.objects.get(user=user_id) dashboard_date = user_profile.dashboard_date or user_profile.last_seen_on page = max(1, page) limit = int(limit) - offset = (page-1) * limit - + offset = (page - 1) * limit + activities_db = cls.objects.filter(user_id=user_id) if categories: activities_db = activities_db.filter(category__in=categories) if public: - activities_db = activities_db.filter(category__nin=['star', 'feedsub', 'opml_import', 'opml_export']) - activities_db = activities_db[offset:offset+limit+1] - + activities_db = activities_db.filter( + category__nin=["star", "feedsub", "opml_import", "opml_export"] + ) + activities_db = activities_db[offset : offset + limit + 1] + has_next_page = len(activities_db) > limit - activities_db = activities_db[offset:offset+limit] + activities_db = activities_db[offset : offset + limit] with_user_ids = [a.with_user_id for a in activities_db if a.with_user_id] - social_profiles = dict((p.user_id, p) for p in MSocialProfile.objects.filter(user_id__in=with_user_ids)) + social_profiles = dict( + (p.user_id, p) for p in MSocialProfile.objects.filter(user_id__in=with_user_ids) + ) activities = [] for activity_db in activities_db: activity = activity_db.canonical() - activity['date'] = activity_db.date - activity['time_since'] = relative_timesince(activity_db.date) + activity["date"] = activity_db.date + activity["time_since"] = relative_timesince(activity_db.date) social_profile = social_profiles.get(activity_db.with_user_id) if social_profile: - activity['photo_url'] = social_profile.profile_photo_url - activity['is_new'] = activity_db.date > dashboard_date - activity['with_user'] = social_profiles.get(activity_db.with_user_id or activity_db.user_id) + activity["photo_url"] = social_profile.profile_photo_url + activity["is_new"] = activity_db.date > dashboard_date + activity["with_user"] = social_profiles.get(activity_db.with_user_id or activity_db.user_id) 
activities.append(activity) - + return activities, has_next_page - + @classmethod def new_starred_story(cls, user_id, story_title, story_feed_id, story_id): - params = dict(user_id=user_id, - category='star', - story_feed_id=story_feed_id, - content_id=story_id) + params = dict(user_id=user_id, category="star", story_feed_id=story_feed_id, content_id=story_id) try: cls.objects.get(**params) except cls.DoesNotExist: @@ -3193,19 +3486,19 @@ def new_starred_story(cls, user_id, story_title, story_feed_id, story_id): @classmethod def remove_starred_story(cls, user_id, story_feed_id, story_id): params = { - 'user_id': user_id, - 'category': 'star', - 'story_feed_id': story_feed_id, - 'content_id': story_id, + "user_id": user_id, + "category": "star", + "story_feed_id": story_feed_id, + "content_id": story_id, } original = cls.objects.filter(**params) original.delete() - + @classmethod def new_feed_subscription(cls, user_id, feed_id, feed_title): params = { "user_id": user_id, - "category": 'feedsub', + "category": "feedsub", "feed_id": feed_id, } try: @@ -3214,7 +3507,7 @@ def new_feed_subscription(cls, user_id, feed_id, feed_title): params.update(dict(content=feed_title)) cls.objects.create(**params) except cls.MultipleObjectsReturned: - dupes = cls.objects.filter(**params).order_by('-date') + dupes = cls.objects.filter(**params).order_by("-date") logging.debug(" ---> ~FRDeleting dupe feed subscription activities. %s found." % dupes.count()) for dupe in dupes[1:]: dupe.delete() @@ -3223,11 +3516,11 @@ def new_feed_subscription(cls, user_id, feed_id, feed_title): def new_opml_import(cls, user_id, count): if count <= 0: return - + params = { "user_id": user_id, - "category": 'opml_import', - 'content': f"You imported an OPML file with {count} sites" + "category": "opml_import", + "content": f"You imported an OPML file with {count} sites", } cls.objects.create(**params) @@ -3235,44 +3528,53 @@ def new_opml_import(cls, user_id, count): def new_opml_export(cls, user_id, count, automated=False): params = { "user_id": user_id, - "category": 'opml_export', - 'content': f"You exported an OPML backup of {count} subscriptions" + "category": "opml_export", + "content": f"You exported an OPML backup of {count} subscriptions", } if automated: - params['content'] = f"An automatic OPML backup of {count} subscriptions was emailed to you" + params["content"] = f"An automatic OPML backup of {count} subscriptions was emailed to you" cls.objects.create(**params) - + @classmethod def new_follow(cls, follower_user_id, followee_user_id): params = { - 'user_id': follower_user_id, - 'with_user_id': followee_user_id, - 'category': 'follow', + "user_id": follower_user_id, + "with_user_id": followee_user_id, + "category": "follow", } try: cls.objects.get(**params) except cls.DoesNotExist: cls.objects.create(**params) except cls.MultipleObjectsReturned: - dupes = cls.objects.filter(**params).order_by('-date') + dupes = cls.objects.filter(**params).order_by("-date") logging.debug(" ---> ~FRDeleting dupe follow activities. %s found." 
% dupes.count()) for dupe in dupes[1:]: dupe.delete() - + @classmethod - def new_comment_reply(cls, user_id, comment_user_id, reply_content, story_id, story_feed_id, story_title=None, original_message=None): + def new_comment_reply( + cls, + user_id, + comment_user_id, + reply_content, + story_id, + story_feed_id, + story_title=None, + original_message=None, + ): params = { - 'user_id': user_id, - 'with_user_id': comment_user_id, - 'category': 'comment_reply', - 'content': linkify(strip_tags(reply_content)), - 'feed_id': "social:%s" % comment_user_id, - 'story_feed_id': story_feed_id, - 'title': story_title, - 'content_id': story_id, + "user_id": user_id, + "with_user_id": comment_user_id, + "category": "comment_reply", + "content": linkify(strip_tags(reply_content)), + "feed_id": "social:%s" % comment_user_id, + "story_feed_id": story_feed_id, + "title": story_title, + "content_id": story_id, } if original_message: - params['content'] = original_message + params["content"] = original_message original = cls.objects.filter(**params).limit(1) if original: original = original[0] @@ -3283,47 +3585,51 @@ def new_comment_reply(cls, user_id, comment_user_id, reply_content, story_id, st if not original_message: cls.objects.create(**params) - + @classmethod def remove_comment_reply(cls, user_id, comment_user_id, reply_content, story_id, story_feed_id): params = { - 'user_id': user_id, - 'with_user_id': comment_user_id, - 'category': 'comment_reply', - 'content': linkify(strip_tags(reply_content)), - 'feed_id': "social:%s" % comment_user_id, - 'story_feed_id': story_feed_id, - 'content_id': story_id, + "user_id": user_id, + "with_user_id": comment_user_id, + "category": "comment_reply", + "content": linkify(strip_tags(reply_content)), + "feed_id": "social:%s" % comment_user_id, + "story_feed_id": story_feed_id, + "content_id": story_id, } original = cls.objects.filter(**params) original.delete() - + @classmethod - def new_comment_like(cls, liking_user_id, comment_user_id, story_id, story_feed_id, story_title, comments): - params = dict(user_id=liking_user_id, - with_user_id=comment_user_id, - category="comment_like", - feed_id="social:%s" % comment_user_id, - story_feed_id=story_feed_id, - content_id=story_id) + def new_comment_like( + cls, liking_user_id, comment_user_id, story_id, story_feed_id, story_title, comments + ): + params = dict( + user_id=liking_user_id, + with_user_id=comment_user_id, + category="comment_like", + feed_id="social:%s" % comment_user_id, + story_feed_id=story_feed_id, + content_id=story_id, + ) try: cls.objects.get(**params) except cls.DoesNotExist: params.update(dict(title=story_title, content=comments)) cls.objects.create(**params) - + @classmethod - def new_shared_story(cls, user_id, source_user_id, story_title, comments, story_feed_id, story_id, share_date=None): + def new_shared_story( + cls, user_id, source_user_id, story_title, comments, story_feed_id, story_id, share_date=None + ): data = { "user_id": user_id, - "category": 'sharedstory', + "category": "sharedstory", "feed_id": "social:%s" % user_id, "story_feed_id": story_feed_id, "content_id": story_id, } - extradata = {'with_user_id': source_user_id, - 'title': story_title, - 'content': comments} + extradata = {"with_user_id": source_user_id, "title": story_title, "content": comments} try: a = cls.objects.get(**data) @@ -3351,20 +3657,22 @@ def new_shared_story(cls, user_id, source_user_id, story_title, comments, story_ @classmethod def remove_shared_story(cls, user_id, story_feed_id, story_id): - params = 
dict(user_id=user_id, - category='sharedstory', - feed_id="social:%s" % user_id, - story_feed_id=story_feed_id, - content_id=story_id) + params = dict( + user_id=user_id, + category="sharedstory", + feed_id="social:%s" % user_id, + story_feed_id=story_feed_id, + content_id=story_id, + ) try: a = cls.objects.get(**params) except cls.DoesNotExist: return except cls.MultipleObjectsReturned: a = cls.objects.filter(**params) - + a.delete() - + @classmethod def new_signup(cls, user_id): params = dict(user_id=user_id, with_user_id=user_id, category="signup") @@ -3375,17 +3683,17 @@ def new_signup(cls, user_id): class MFollowRequest(mongo.Document): - follower_user_id = mongo.IntField(unique_with='followee_user_id') - followee_user_id = mongo.IntField() - date = mongo.DateTimeField(default=datetime.datetime.now) - + follower_user_id = mongo.IntField(unique_with="followee_user_id") + followee_user_id = mongo.IntField() + date = mongo.DateTimeField(default=datetime.datetime.now) + meta = { - 'collection': 'follow_request', - 'indexes': ['follower_user_id', 'followee_user_id'], - 'ordering': ['-date'], - 'allow_inheritance': False, + "collection": "follow_request", + "indexes": ["follower_user_id", "followee_user_id"], + "ordering": ["-date"], + "allow_inheritance": False, } - + @classmethod def add(cls, follower_user_id, followee_user_id): params = dict(follower_user_id=follower_user_id, followee_user_id=followee_user_id) @@ -3393,9 +3701,7 @@ def add(cls, follower_user_id, followee_user_id): cls.objects.get(**params) except cls.DoesNotExist: cls.objects.create(**params) - + @classmethod def remove(cls, follower_user_id, followee_user_id): - cls.objects.filter(follower_user_id=follower_user_id, - followee_user_id=followee_user_id).delete() - + cls.objects.filter(follower_user_id=follower_user_id, followee_user_id=followee_user_id).delete() diff --git a/apps/social/tasks.py b/apps/social/tasks.py index d16f5c67a9..476450ad50 100644 --- a/apps/social/tasks.py +++ b/apps/social/tasks.py @@ -12,52 +12,61 @@ def PostToService(shared_story_id, service): shared_story.post_to_service(service) except MSharedStory.DoesNotExist: logging.debug(" ---> Shared story not found (%s). 
Can't post to: %s" % (shared_story_id, service)) - + + @app.task() def EmailNewFollower(follower_user_id, followee_user_id): user_profile = MSocialProfile.get_user(followee_user_id) user_profile.send_email_for_new_follower(follower_user_id) - + + @app.task() def EmailFollowRequest(follower_user_id, followee_user_id): user_profile = MSocialProfile.get_user(followee_user_id) user_profile.send_email_for_follow_request(follower_user_id) - + + @app.task() def EmailFirstShare(user_id): user = User.objects.get(pk=user_id) user.profile.send_first_share_to_blurblog_email() - + + @app.task() def EmailCommentReplies(shared_story_id, reply_id): shared_story = MSharedStory.objects.get(id=ObjectId(shared_story_id)) shared_story.send_emails_for_new_reply(ObjectId(reply_id)) - + + @app.task() def EmailStoryReshares(shared_story_id): shared_story = MSharedStory.objects.get(id=ObjectId(shared_story_id)) shared_story.send_email_for_reshare() - + + @app.task() def SyncTwitterFriends(user_id): social_services = MSocialServices.objects.get(user_id=user_id) social_services.sync_twitter_friends() + @app.task() def SyncFacebookFriends(user_id): social_services = MSocialServices.objects.get(user_id=user_id) social_services.sync_facebook_friends() - + + @app.task(name="share-popular-stories") def SharePopularStories(): logging.debug(" ---> Sharing popular stories...") MSharedStory.share_popular_stories(interactive=False) - -@app.task(name='clean-social-spam') + + +@app.task(name="clean-social-spam") def CleanSocialSpam(): logging.debug(" ---> Finding social spammers...") MSharedStory.count_potential_spammers(destroy=True) - + @app.task() def UpdateRecalcForSubscription(subscription_user_id, shared_story_id): @@ -68,12 +77,12 @@ def UpdateRecalcForSubscription(subscription_user_id, shared_story_id): except MSharedStory.DoesNotExist: return - logging.debug(" ---> ~FM~SNFlipping unread recalc for ~SB%s~SN subscriptions to ~SB%s's blurblog~SN" % ( - socialsubs.count(), - user.username - )) + logging.debug( + " ---> ~FM~SNFlipping unread recalc for ~SB%s~SN subscriptions to ~SB%s's blurblog~SN" + % (socialsubs.count(), user.username) + ) for socialsub in socialsubs: socialsub.needs_unread_recalc = True socialsub.save() - + shared_story.publish_update_to_subscribers() diff --git a/apps/social/templatetags/social_tags.py b/apps/social/templatetags/social_tags.py index d07711381e..169995160f 100644 --- a/apps/social/templatetags/social_tags.py +++ b/apps/social/templatetags/social_tags.py @@ -4,63 +4,69 @@ register = template.Library() -@register.inclusion_tag('social/social_story.xhtml', takes_context=True) + +@register.inclusion_tag("social/social_story.xhtml", takes_context=True) def render_social_story(context, story, has_next_story=False): - user = context['user'] - user_social_profile = context['user_social_profile'] - + user = context["user"] + user_social_profile = context["user_social_profile"] + return { - 'story': story, - 'has_next_story': has_next_story, - 'user': user, - 'user_social_profile': user_social_profile, + "story": story, + "has_next_story": has_next_story, + "user": user, + "user_social_profile": user_social_profile, } -@register.inclusion_tag('social/story_share.xhtml', takes_context=True) + +@register.inclusion_tag("social/story_share.xhtml", takes_context=True) def render_story_share(context, story): - user = context['user'] + user = context["user"] return { - 'user': user, - 'story': story, + "user": user, + "story": story, } - -@register.inclusion_tag('social/story_comments.xhtml', 
takes_context=True)
+
+
+@register.inclusion_tag("social/story_comments.xhtml", takes_context=True)
 def render_story_comments(context, story):
-    user = context['user']
-    user_social_profile = context.get('user_social_profile')
+    user = context["user"]
+    user_social_profile = context.get("user_social_profile")
     MEDIA_URL = settings.MEDIA_URL
     if not user_social_profile and user.is_authenticated:
         user_social_profile = MSocialProfile.objects.get(user_id=user.pk)
-    
+
     return {
-        'user': user,
-        'user_social_profile': user_social_profile,
-        'story': story,
-        'MEDIA_URL': MEDIA_URL,
+        "user": user,
+        "user_social_profile": user_social_profile,
+        "story": story,
+        "MEDIA_URL": MEDIA_URL,
     }
 
-@register.inclusion_tag('social/story_comment.xhtml', takes_context=True)
+
+@register.inclusion_tag("social/story_comment.xhtml", takes_context=True)
 def render_story_comment(context, story, comment):
-    user = context['user']
+    user = context["user"]
     MEDIA_URL = settings.MEDIA_URL
-    
+
     return {
-        'user': user,
-        'story': story,
-        'comment': comment,
-        'MEDIA_URL': MEDIA_URL,
+        "user": user,
+        "story": story,
+        "comment": comment,
+        "MEDIA_URL": MEDIA_URL,
     }
 
-@register.inclusion_tag('mail/email_story_comment.xhtml')
+
+@register.inclusion_tag("mail/email_story_comment.xhtml")
 def render_email_comment(comment):
     return {
-        'comment': comment,
+        "comment": comment,
     }
-
-@register.inclusion_tag('social/avatars.xhtml')
+
+
+@register.inclusion_tag("social/avatars.xhtml")
 def render_avatars(avatars):
     if not isinstance(avatars, list):
         avatars = [avatars]
     return {
-        'users': avatars,
+        "users": avatars,
     }
diff --git a/apps/social/urls.py b/apps/social/urls.py
index 2b386b2179..1b5934d177 100644
--- a/apps/social/urls.py
+++ b/apps/social/urls.py
@@ -2,42 +2,68 @@
 from apps.social import views
 
 urlpatterns = [
-    url(r'^river_stories/?$', views.load_river_blurblog, name='social-river-blurblog'),
-    url(r'^share_story/?$', views.mark_story_as_shared, name='mark-story-as-shared'),
-    url(r'^unshare_story/?$', views.mark_story_as_unshared, name='mark-story-as-unshared'),
-    url(r'^load_user_friends/?$', views.load_user_friends, name='load-user-friends'),
-    url(r'^load_follow_requests/?$', views.load_follow_requests, name='load-follow-requests'),
-    url(r'^profile/?$', views.profile, name='profile'),
-    url(r'^load_user_profile/?$', views.load_user_profile, name='load-user-profile'),
-    url(r'^save_user_profile/?$', views.save_user_profile, name='save-user-profile'),
-    url(r'^upload_avatar/?', views.upload_avatar, name='upload-avatar'),
-    url(r'^save_blurblog_settings/?$', views.save_blurblog_settings, name='save-blurblog-settings'),
-    url(r'^interactions/?$', views.load_interactions, name='social-interactions'),
-    url(r'^activities/?$', views.load_activities, name='social-activities'),
-    url(r'^follow/?$', views.follow, name='social-follow'),
-    url(r'^unfollow/?$', views.unfollow, name='social-unfollow'),
-    url(r'^approve_follower/?$', views.approve_follower, name='social-approve-follower'),
-    url(r'^ignore_follower/?$', views.ignore_follower, name='social-ignore-follower'),
-    url(r'^mute_user/?$', views.mute_user, name='social-mute-user'),
-    url(r'^unmute_user/?$', views.unmute_user, name='social-unmute-user'),
-    url(r'^feed_trainer', views.social_feed_trainer, name='social-feed-trainer'),
-    url(r'^public_comments/?$', views.story_public_comments, name='story-public-comments'),
-    url(r'^save_comment_reply/?$', views.save_comment_reply, name='social-save-comment-reply'),
-    url(r'^remove_comment_reply/?$', views.remove_comment_reply, name='social-remove-comment-reply'),
-    url(r'^find_friends/?$', views.find_friends, name='social-find-friends'),
-    url(r'^like_comment/?$', views.like_comment, name='social-like-comment'),
-    url(r'^remove_like_comment/?$', views.remove_like_comment, name='social-remove-like-comment'),
+    url(r"^river_stories/?$", views.load_river_blurblog, name="social-river-blurblog"),
+    url(r"^share_story/?$", views.mark_story_as_shared, name="mark-story-as-shared"),
+    url(r"^unshare_story/?$", views.mark_story_as_unshared, name="mark-story-as-unshared"),
+    url(r"^load_user_friends/?$", views.load_user_friends, name="load-user-friends"),
+    url(r"^load_follow_requests/?$", views.load_follow_requests, name="load-follow-requests"),
+    url(r"^profile/?$", views.profile, name="profile"),
+    url(r"^load_user_profile/?$", views.load_user_profile, name="load-user-profile"),
+    url(r"^save_user_profile/?$", views.save_user_profile, name="save-user-profile"),
+    url(r"^upload_avatar/?", views.upload_avatar, name="upload-avatar"),
+    url(r"^save_blurblog_settings/?$", views.save_blurblog_settings, name="save-blurblog-settings"),
+    url(r"^interactions/?$", views.load_interactions, name="social-interactions"),
+    url(r"^activities/?$", views.load_activities, name="social-activities"),
+    url(r"^follow/?$", views.follow, name="social-follow"),
+    url(r"^unfollow/?$", views.unfollow, name="social-unfollow"),
+    url(r"^approve_follower/?$", views.approve_follower, name="social-approve-follower"),
+    url(r"^ignore_follower/?$", views.ignore_follower, name="social-ignore-follower"),
+    url(r"^mute_user/?$", views.mute_user, name="social-mute-user"),
+    url(r"^unmute_user/?$", views.unmute_user, name="social-unmute-user"),
+    url(r"^feed_trainer", views.social_feed_trainer, name="social-feed-trainer"),
+    url(r"^public_comments/?$", views.story_public_comments, name="story-public-comments"),
+    url(r"^save_comment_reply/?$", views.save_comment_reply, name="social-save-comment-reply"),
+    url(r"^remove_comment_reply/?$", views.remove_comment_reply, name="social-remove-comment-reply"),
+    url(r"^find_friends/?$", views.find_friends, name="social-find-friends"),
+    url(r"^like_comment/?$", views.like_comment, name="social-like-comment"),
+    url(r"^remove_like_comment/?$", views.remove_like_comment, name="social-remove-like-comment"),
     # url(r'^like_reply/?$', views.like_reply, name='social-like-reply'),
     # url(r'^remove_like_reply/?$', views.remove_like_reply, name='social-remove-like-reply'),
-    url(r'^comment/(?P<comment_id>\w+)/reply/(?P<reply_id>\w+)/?$', views.comment_reply, name='social-comment-reply'),
-    url(r'^comment/(?P<comment_id>\w+)/?$', views.comment, name='social-comment'),
-    url(r'^rss/(?P<user_id>\d+)/?$', views.shared_stories_rss_feed, name='shared-stories-rss-feed'),
-    url(r'^rss/(?P<user_id>\d+)/(?P<username>[-\w]+)?$', views.shared_stories_rss_feed, name='shared-stories-rss-feed'),
-    url(r'^stories/(?P<user_id>\w+)/(?P<username>[-\w]+)?/?$', views.load_social_stories, name='load-social-stories'),
-    url(r'^page/(?P<user_id>\w+)/(?P<username>[-\w]+)?/?$', views.load_social_page, name='load-social-page'),
-    url(r'^settings/(?P<user_id>\w+)/(?P<username>[-\w]+)?/?$', views.load_social_settings, name='load-social-settings'),
-    url(r'^statistics/(?P<user_id>\w+)/(?P<username>[-\w]+)/?$', views.load_social_statistics, name='load-social-statistics'),
-    url(r'^statistics/(?P<user_id>\w+)/?$', views.load_social_statistics, name='load-social-statistics'),
-    url(r'^mute_story/(?P<secret_token>\w+)/(?P<shared_story_id>\w+)?$', views.mute_story, name='social-mute-story'),
-    url(r'^(?P<username>[-\w]+)/?$', views.shared_stories_public, name='shared-stories-public'),
+    url(
+        r"^comment/(?P<comment_id>\w+)/reply/(?P<reply_id>\w+)/?$",
+        views.comment_reply,
+        name="social-comment-reply",
+    ),
+    url(r"^comment/(?P<comment_id>\w+)/?$", views.comment, name="social-comment"),
+    url(r"^rss/(?P<user_id>\d+)/?$", views.shared_stories_rss_feed, name="shared-stories-rss-feed"),
+    url(
+        r"^rss/(?P<user_id>\d+)/(?P<username>[-\w]+)?$",
+        views.shared_stories_rss_feed,
+        name="shared-stories-rss-feed",
+    ),
+    url(
+        r"^stories/(?P<user_id>\w+)/(?P<username>[-\w]+)?/?$",
+        views.load_social_stories,
+        name="load-social-stories",
+    ),
+    url(r"^page/(?P<user_id>\w+)/(?P<username>[-\w]+)?/?$", views.load_social_page, name="load-social-page"),
+    url(
+        r"^settings/(?P<user_id>\w+)/(?P<username>[-\w]+)?/?$",
+        views.load_social_settings,
+        name="load-social-settings",
+    ),
+    url(
+        r"^statistics/(?P<user_id>\w+)/(?P<username>[-\w]+)/?$",
+        views.load_social_statistics,
+        name="load-social-statistics",
+    ),
+    url(
+        r"^statistics/(?P<user_id>\w+)/?$", views.load_social_statistics, name="load-social-statistics"
+    ),
+    url(
+        r"^mute_story/(?P<secret_token>\w+)/(?P<shared_story_id>\w+)?$",
+        views.mute_story,
+        name="social-mute-story",
+    ),
+    url(r"^(?P<username>[-\w]+)/?$", views.shared_stories_public, name="shared-stories-public"),
 ]
diff --git a/apps/social/views.py b/apps/social/views.py
index ab8797b35f..513129020f 100644
--- a/apps/social/views.py
+++ b/apps/social/views.py
@@ -13,12 +13,23 @@
 from django.conf import settings
 from django.utils import feedgenerator
 from apps.rss_feeds.models import MStory, Feed, MStarredStory
-from apps.social.models import MSharedStory, MSocialServices, MSocialProfile, MSocialSubscription, MCommentReply
+from apps.social.models import (
+    MSharedStory,
+    MSocialServices,
+    MSocialProfile,
+    MSocialSubscription,
+    MCommentReply,
+)
 from apps.social.models import MInteraction, MActivity, MFollowRequest
 from apps.social.tasks import PostToService, EmailCommentReplies, EmailStoryReshares
 from apps.social.tasks import UpdateRecalcForSubscription, EmailFirstShare
 from apps.analyzer.models import MClassifierTitle, MClassifierAuthor, MClassifierFeed, MClassifierTag
-from apps.analyzer.models import apply_classifier_titles, apply_classifier_feeds, apply_classifier_authors, apply_classifier_tags
+from apps.analyzer.models import (
+    apply_classifier_titles,
+    apply_classifier_feeds,
+    apply_classifier_authors,
+    apply_classifier_tags,
+)
 from apps.analyzer.models import get_classifiers_for_user, sort_classifiers_by_feed
 from apps.reader.models import UserSubscription
 from apps.profile.models import Profile
@@ -37,56 +48,67 @@
 @json.json_view
 def load_social_stories(request, user_id, username=None):
-    user           = get_user(request)
+    user = get_user(request)
     social_user_id = int(user_id)
-    social_user    = get_object_or_404(User, pk=social_user_id)
-    offset         = int(request.GET.get('offset', 0))
-    limit          = int(request.GET.get('limit', 6))
-    page           = int(request.GET.get('page', 1))
-    order          = request.GET.get('order', 'newest')
-    read_filter    = request.GET.get('read_filter', 'all')
-    query          = request.GET.get('query', '').strip()
-    include_story_content = is_true(request.GET.get('include_story_content', True))
-    stories        = []
-    message        = None
-
-    if page: offset = limit * (int(page) - 1)
+    social_user = get_object_or_404(User, pk=social_user_id)
+    offset = int(request.GET.get("offset", 0))
+    limit = int(request.GET.get("limit", 6))
+    page = int(request.GET.get("page", 1))
+    order = request.GET.get("order", "newest")
+    read_filter = request.GET.get("read_filter", "all")
+    query = request.GET.get("query", "").strip()
+    include_story_content = is_true(request.GET.get("include_story_content", True))
+    stories = []
+    message = None
+
+    if page:
+        offset = limit * (int(page) - 1)
     now = 
localtime_for_timezone(datetime.datetime.now(), user.profile.timezone) - + social_profile = MSocialProfile.get_user(social_user.pk) try: socialsub = MSocialSubscription.objects.get(user_id=user.pk, subscription_user_id=social_user_id) except MSocialSubscription.DoesNotExist: socialsub = None - + if social_profile.private and not social_profile.is_followed_by_user(user.pk): - message = "%s has a private blurblog and you must be following them in order to read it." % social_profile.username + message = ( + "%s has a private blurblog and you must be following them in order to read it." + % social_profile.username + ) elif query: if user.profile.is_premium: stories = social_profile.find_stories(query, offset=offset, limit=limit) else: stories = [] message = "You must be a premium subscriber to search." - elif socialsub and (read_filter == 'unread' or order == 'oldest'): + elif socialsub and (read_filter == "unread" or order == "oldest"): cutoff_date = max(socialsub.mark_read_date, user.profile.unread_cutoff) - story_hashes = socialsub.get_stories(order=order, read_filter=read_filter, offset=offset, limit=limit, cutoff_date=cutoff_date) - story_date_order = "%sshared_date" % ('' if order == 'oldest' else '-') + story_hashes = socialsub.get_stories( + order=order, read_filter=read_filter, offset=offset, limit=limit, cutoff_date=cutoff_date + ) + story_date_order = "%sshared_date" % ("" if order == "oldest" else "-") if story_hashes: - mstories = MSharedStory.objects(user_id=social_user.pk, - story_hash__in=story_hashes).order_by(story_date_order) - for story in mstories: story.extract_image_urls() + mstories = MSharedStory.objects(user_id=social_user.pk, story_hash__in=story_hashes).order_by( + story_date_order + ) + for story in mstories: + story.extract_image_urls() stories = Feed.format_stories(mstories) else: - mstories = MSharedStory.objects(user_id=social_user.pk).order_by('-shared_date')[offset:offset+limit] - for story in mstories: story.extract_image_urls() + mstories = MSharedStory.objects(user_id=social_user.pk).order_by("-shared_date")[ + offset : offset + limit + ] + for story in mstories: + story.extract_image_urls() stories = Feed.format_stories(mstories) - if not stories or False: # False is to force a recount even if 0 stories + if not stories or False: # False is to force a recount even if 0 stories return dict(stories=[], message=message) - + stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, user.pk, check_all=True) - story_feed_ids = list(set(s['story_feed_id'] for s in stories)) + story_feed_ids = list(set(s["story_feed_id"] for s in stories)) usersubs = UserSubscription.objects.filter(user__pk=user.pk, feed__pk__in=story_feed_ids) usersubs_map = dict((sub.feed_id, sub) for sub in usersubs) unsub_feed_ids = list(set(story_feed_ids).difference(set(usersubs_map.keys()))) @@ -95,115 +117,140 @@ def load_social_stories(request, user_id, username=None): date_delta = user.profile.unread_cutoff if socialsub and date_delta < socialsub.mark_read_date: date_delta = socialsub.mark_read_date - + # Get intelligence classifier for user - classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, social_user_id=social_user_id)) + classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, social_user_id=social_user_id)) classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk, social_user_id=social_user_id)) - classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, social_user_id=social_user_id)) - classifier_tags = 
list(MClassifierTag.objects(user_id=user.pk, social_user_id=social_user_id)) + classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, social_user_id=social_user_id)) + classifier_tags = list(MClassifierTag.objects(user_id=user.pk, social_user_id=social_user_id)) # Merge with feed specific classifiers - classifier_feeds = classifier_feeds + list(MClassifierFeed.objects(user_id=user.pk, feed_id__in=story_feed_ids)) - classifier_authors = classifier_authors + list(MClassifierAuthor.objects(user_id=user.pk, feed_id__in=story_feed_ids)) - classifier_titles = classifier_titles + list(MClassifierTitle.objects(user_id=user.pk, feed_id__in=story_feed_ids)) - classifier_tags = classifier_tags + list(MClassifierTag.objects(user_id=user.pk, feed_id__in=story_feed_ids)) + classifier_feeds = classifier_feeds + list( + MClassifierFeed.objects(user_id=user.pk, feed_id__in=story_feed_ids) + ) + classifier_authors = classifier_authors + list( + MClassifierAuthor.objects(user_id=user.pk, feed_id__in=story_feed_ids) + ) + classifier_titles = classifier_titles + list( + MClassifierTitle.objects(user_id=user.pk, feed_id__in=story_feed_ids) + ) + classifier_tags = classifier_tags + list( + MClassifierTag.objects(user_id=user.pk, feed_id__in=story_feed_ids) + ) unread_story_hashes = [] - if (read_filter == 'all' or query) and socialsub: - unread_story_hashes = socialsub.get_stories(read_filter='unread', limit=500, cutoff_date=user.profile.unread_cutoff) - story_hashes = [story['story_hash'] for story in stories] - - starred_stories = MStarredStory.objects(user_id=user.pk, - story_hash__in=story_hashes)\ - .only('story_hash', 'starred_date', 'user_tags') - shared_stories = MSharedStory.objects(user_id=user.pk, - story_hash__in=story_hashes)\ - .hint([('story_hash', 1)])\ - .only('story_hash', 'shared_date', 'comments') - starred_stories = dict([(story.story_hash, dict(starred_date=story.starred_date, - user_tags=story.user_tags)) - for story in starred_stories]) - shared_stories = dict([(story.story_hash, dict(shared_date=story.shared_date, - comments=story.comments)) - for story in shared_stories]) - + if (read_filter == "all" or query) and socialsub: + unread_story_hashes = socialsub.get_stories( + read_filter="unread", limit=500, cutoff_date=user.profile.unread_cutoff + ) + story_hashes = [story["story_hash"] for story in stories] + + starred_stories = MStarredStory.objects(user_id=user.pk, story_hash__in=story_hashes).only( + "story_hash", "starred_date", "user_tags" + ) + shared_stories = ( + MSharedStory.objects(user_id=user.pk, story_hash__in=story_hashes) + .hint([("story_hash", 1)]) + .only("story_hash", "shared_date", "comments") + ) + starred_stories = dict( + [ + (story.story_hash, dict(starred_date=story.starred_date, user_tags=story.user_tags)) + for story in starred_stories + ] + ) + shared_stories = dict( + [ + (story.story_hash, dict(shared_date=story.shared_date, comments=story.comments)) + for story in shared_stories + ] + ) + nowtz = localtime_for_timezone(now, user.profile.timezone) for story in stories: if not include_story_content: - del story['story_content'] - story['social_user_id'] = social_user_id + del story["story_content"] + story["social_user_id"] = social_user_id # story_date = localtime_for_timezone(story['story_date'], user.profile.timezone) - shared_date = localtime_for_timezone(story['shared_date'], user.profile.timezone) - story['short_parsed_date'] = format_story_link_date__short(shared_date, nowtz) - story['long_parsed_date'] = 
format_story_link_date__long(shared_date, nowtz) - - story['read_status'] = 1 - if story['story_date'] < user.profile.unread_cutoff: - story['read_status'] = 1 - elif (read_filter == 'all' or query) and socialsub: - story['read_status'] = 1 if story['story_hash'] not in unread_story_hashes else 0 - elif read_filter == 'unread' and socialsub: - story['read_status'] = 0 - - if story['story_hash'] in starred_stories: - story['starred'] = True - starred_date = localtime_for_timezone(starred_stories[story['story_hash']]['starred_date'], - user.profile.timezone) - story['starred_date'] = format_story_link_date__long(starred_date, now) - story['user_tags'] = starred_stories[story['story_hash']]['user_tags'] - if story['story_hash'] in shared_stories: - story['shared'] = True - story['shared_comments'] = strip_tags(shared_stories[story['story_hash']]['comments']) - - story['intelligence'] = { - 'feed': apply_classifier_feeds(classifier_feeds, story['story_feed_id'], - social_user_ids=social_user_id), - 'author': apply_classifier_authors(classifier_authors, story), - 'tags': apply_classifier_tags(classifier_tags, story), - 'title': apply_classifier_titles(classifier_titles, story), + shared_date = localtime_for_timezone(story["shared_date"], user.profile.timezone) + story["short_parsed_date"] = format_story_link_date__short(shared_date, nowtz) + story["long_parsed_date"] = format_story_link_date__long(shared_date, nowtz) + + story["read_status"] = 1 + if story["story_date"] < user.profile.unread_cutoff: + story["read_status"] = 1 + elif (read_filter == "all" or query) and socialsub: + story["read_status"] = 1 if story["story_hash"] not in unread_story_hashes else 0 + elif read_filter == "unread" and socialsub: + story["read_status"] = 0 + + if story["story_hash"] in starred_stories: + story["starred"] = True + starred_date = localtime_for_timezone( + starred_stories[story["story_hash"]]["starred_date"], user.profile.timezone + ) + story["starred_date"] = format_story_link_date__long(starred_date, now) + story["user_tags"] = starred_stories[story["story_hash"]]["user_tags"] + if story["story_hash"] in shared_stories: + story["shared"] = True + story["shared_comments"] = strip_tags(shared_stories[story["story_hash"]]["comments"]) + + story["intelligence"] = { + "feed": apply_classifier_feeds( + classifier_feeds, story["story_feed_id"], social_user_ids=social_user_id + ), + "author": apply_classifier_authors(classifier_authors, story), + "tags": apply_classifier_tags(classifier_tags, story), + "title": apply_classifier_titles(classifier_titles, story), } - - - classifiers = sort_classifiers_by_feed(user=user, feed_ids=story_feed_ids, - classifier_feeds=classifier_feeds, - classifier_authors=classifier_authors, - classifier_titles=classifier_titles, - classifier_tags=classifier_tags) + + classifiers = sort_classifiers_by_feed( + user=user, + feed_ids=story_feed_ids, + classifier_feeds=classifier_feeds, + classifier_authors=classifier_authors, + classifier_titles=classifier_titles, + classifier_tags=classifier_tags, + ) if socialsub: socialsub.feed_opens += 1 socialsub.needs_unread_recalc = True socialsub.save() - + search_log = "~SN~FG(~SB%s~SN)" % query if query else "" - logging.user(request, "~FYLoading ~FMshared stories~FY: ~SB%s%s %s" % ( - social_profile.title[:22], ('~SN/p%s' % page) if page > 1 else '', search_log)) + logging.user( + request, + "~FYLoading ~FMshared stories~FY: ~SB%s%s %s" + % (social_profile.title[:22], ("~SN/p%s" % page) if page > 1 else "", search_log), + ) return { - 
"stories": stories, - "user_profiles": user_profiles, - "feeds": unsub_feeds, + "stories": stories, + "user_profiles": user_profiles, + "feeds": unsub_feeds, "classifiers": classifiers, } - + + @json.json_view def load_river_blurblog(request): - limit = int(request.GET.get('limit', 10)) - start = time.time() - user = get_user(request) - social_user_ids = request.GET.getlist('social_user_ids') or request.GET.getlist('social_user_ids[]') - social_user_ids = [int(uid) for uid in social_user_ids if uid] + limit = int(request.GET.get("limit", 10)) + start = time.time() + user = get_user(request) + social_user_ids = request.GET.getlist("social_user_ids") or request.GET.getlist("social_user_ids[]") + social_user_ids = [int(uid) for uid in social_user_ids if uid] original_user_ids = list(social_user_ids) - page = int(request.GET.get('page', 1)) - order = request.GET.get('order', 'newest') - read_filter = request.GET.get('read_filter', 'unread') - relative_user_id = request.GET.get('relative_user_id', None) - global_feed = request.GET.get('global_feed', None) - on_dashboard = is_true(request.GET.get('dashboard', False)) - now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone) + page = int(request.GET.get("page", 1)) + order = request.GET.get("order", "newest") + read_filter = request.GET.get("read_filter", "unread") + relative_user_id = request.GET.get("relative_user_id", None) + global_feed = request.GET.get("global_feed", None) + on_dashboard = is_true(request.GET.get("dashboard", False)) + now = localtime_for_timezone(datetime.datetime.now(), user.profile.timezone) if global_feed: - global_user = User.objects.get(username='popular') + global_user = User.objects.get(username="popular") relative_user_id = global_user.pk - + if not relative_user_id: relative_user_id = user.pk @@ -213,147 +260,172 @@ def load_river_blurblog(request): if not social_user_ids: social_user_ids = [s.subscription_user_id for s in socialsubs] - - offset = (page-1) * limit + + offset = (page - 1) * limit limit = page * limit - 1 - + story_hashes, story_dates, unread_feed_story_hashes = MSocialSubscription.feed_stories( - user.pk, social_user_ids, - offset=offset, limit=limit, - order=order, read_filter=read_filter, - relative_user_id=relative_user_id, - socialsubs=socialsubs, - cutoff_date=user.profile.unread_cutoff, - dashboard_global=on_dashboard and global_feed) + user.pk, + social_user_ids, + offset=offset, + limit=limit, + order=order, + read_filter=read_filter, + relative_user_id=relative_user_id, + socialsubs=socialsubs, + cutoff_date=user.profile.unread_cutoff, + dashboard_global=on_dashboard and global_feed, + ) mstories = MStory.find_by_story_hashes(story_hashes) story_hashes_to_dates = dict(list(zip(story_hashes, story_dates))) sorted_mstories = reversed(sorted(mstories, key=lambda x: int(story_hashes_to_dates[str(x.story_hash)]))) stories = Feed.format_stories(sorted_mstories) for s, story in enumerate(stories): - timestamp = story_hashes_to_dates[story['story_hash']] - story['story_date'] = datetime.datetime.fromtimestamp(timestamp) + timestamp = story_hashes_to_dates[story["story_hash"]] + story["story_date"] = datetime.datetime.fromtimestamp(timestamp) share_relative_user_id = relative_user_id if global_feed: share_relative_user_id = user.pk - - stories, user_profiles = MSharedStory.stories_with_comments_and_profiles(stories, - share_relative_user_id, - check_all=True) - story_feed_ids = list(set(s['story_feed_id'] for s in stories)) + stories, user_profiles = 
MSharedStory.stories_with_comments_and_profiles( + stories, share_relative_user_id, check_all=True + ) + + story_feed_ids = list(set(s["story_feed_id"] for s in stories)) usersubs = UserSubscription.objects.filter(user__pk=user.pk, feed__pk__in=story_feed_ids) usersubs_map = dict((sub.feed_id, sub) for sub in usersubs) unsub_feed_ids = list(set(story_feed_ids).difference(set(usersubs_map.keys()))) unsub_feeds = Feed.objects.filter(pk__in=unsub_feed_ids) unsub_feeds = [feed.canonical(include_favicon=False) for feed in unsub_feeds] - + if story_feed_ids: - story_hashes = [story['story_hash'] for story in stories] - starred_stories = MStarredStory.objects( - user_id=user.pk, - story_hash__in=story_hashes - ).only('story_hash', 'starred_date', 'user_tags') - starred_stories = dict([(story.story_hash, dict(starred_date=story.starred_date, - user_tags=story.user_tags)) - for story in starred_stories]) - shared_stories = MSharedStory.objects(user_id=user.pk, - story_hash__in=story_hashes)\ - .hint([('story_hash', 1)])\ - .only('story_hash', 'shared_date', 'comments') - shared_stories = dict([(story.story_hash, dict(shared_date=story.shared_date, - comments=story.comments)) - for story in shared_stories]) + story_hashes = [story["story_hash"] for story in stories] + starred_stories = MStarredStory.objects(user_id=user.pk, story_hash__in=story_hashes).only( + "story_hash", "starred_date", "user_tags" + ) + starred_stories = dict( + [ + (story.story_hash, dict(starred_date=story.starred_date, user_tags=story.user_tags)) + for story in starred_stories + ] + ) + shared_stories = ( + MSharedStory.objects(user_id=user.pk, story_hash__in=story_hashes) + .hint([("story_hash", 1)]) + .only("story_hash", "shared_date", "comments") + ) + shared_stories = dict( + [ + (story.story_hash, dict(shared_date=story.shared_date, comments=story.comments)) + for story in shared_stories + ] + ) else: starred_stories = {} shared_stories = {} - + # Intelligence classifiers for all feeds involved if story_feed_ids: - classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, - social_user_id__in=social_user_ids)) - classifier_feeds = classifier_feeds + list(MClassifierFeed.objects(user_id=user.pk, - feed_id__in=story_feed_ids)) - classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk, - feed_id__in=story_feed_ids)) - classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, - feed_id__in=story_feed_ids)) - classifier_tags = list(MClassifierTag.objects(user_id=user.pk, - feed_id__in=story_feed_ids)) + classifier_feeds = list(MClassifierFeed.objects(user_id=user.pk, social_user_id__in=social_user_ids)) + classifier_feeds = classifier_feeds + list( + MClassifierFeed.objects(user_id=user.pk, feed_id__in=story_feed_ids) + ) + classifier_authors = list(MClassifierAuthor.objects(user_id=user.pk, feed_id__in=story_feed_ids)) + classifier_titles = list(MClassifierTitle.objects(user_id=user.pk, feed_id__in=story_feed_ids)) + classifier_tags = list(MClassifierTag.objects(user_id=user.pk, feed_id__in=story_feed_ids)) else: classifier_feeds = [] classifier_authors = [] classifier_titles = [] classifier_tags = [] - + # Just need to format stories nowtz = localtime_for_timezone(now, user.profile.timezone) for story in stories: - story['read_status'] = 0 - if story['story_hash'] not in unread_feed_story_hashes: - story['read_status'] = 1 - story_date = localtime_for_timezone(story['story_date'], user.profile.timezone) - story['short_parsed_date'] = format_story_link_date__short(story_date, nowtz) - 
story['long_parsed_date'] = format_story_link_date__long(story_date, nowtz) - if story['story_hash'] in starred_stories: - story['starred'] = True - starred_date = localtime_for_timezone(starred_stories[story['story_hash']]['starred_date'], user.profile.timezone) - story['starred_date'] = format_story_link_date__long(starred_date, now) - story['user_tags'] = starred_stories[story['story_hash']]['user_tags'] - story['intelligence'] = { - 'feed': apply_classifier_feeds(classifier_feeds, story['story_feed_id'], - social_user_ids=story['friend_user_ids']), - 'author': apply_classifier_authors(classifier_authors, story), - 'tags': apply_classifier_tags(classifier_tags, story), - 'title': apply_classifier_titles(classifier_titles, story), + story["read_status"] = 0 + if story["story_hash"] not in unread_feed_story_hashes: + story["read_status"] = 1 + story_date = localtime_for_timezone(story["story_date"], user.profile.timezone) + story["short_parsed_date"] = format_story_link_date__short(story_date, nowtz) + story["long_parsed_date"] = format_story_link_date__long(story_date, nowtz) + if story["story_hash"] in starred_stories: + story["starred"] = True + starred_date = localtime_for_timezone( + starred_stories[story["story_hash"]]["starred_date"], user.profile.timezone + ) + story["starred_date"] = format_story_link_date__long(starred_date, now) + story["user_tags"] = starred_stories[story["story_hash"]]["user_tags"] + story["intelligence"] = { + "feed": apply_classifier_feeds( + classifier_feeds, story["story_feed_id"], social_user_ids=story["friend_user_ids"] + ), + "author": apply_classifier_authors(classifier_authors, story), + "tags": apply_classifier_tags(classifier_tags, story), + "title": apply_classifier_titles(classifier_titles, story), } - if story['story_hash'] in shared_stories: - story['shared'] = True - shared_date = localtime_for_timezone(shared_stories[story['story_hash']]['shared_date'], - user.profile.timezone) - story['shared_date'] = format_story_link_date__long(shared_date, now) - story['shared_comments'] = strip_tags(shared_stories[story['story_hash']]['comments']) - if (shared_stories[story['story_hash']]['shared_date'] < user.profile.unread_cutoff or - story['story_hash'] not in unread_feed_story_hashes): - story['read_status'] = 1 - - classifiers = sort_classifiers_by_feed(user=user, feed_ids=story_feed_ids, - classifier_feeds=classifier_feeds, - classifier_authors=classifier_authors, - classifier_titles=classifier_titles, - classifier_tags=classifier_tags) + if story["story_hash"] in shared_stories: + story["shared"] = True + shared_date = localtime_for_timezone( + shared_stories[story["story_hash"]]["shared_date"], user.profile.timezone + ) + story["shared_date"] = format_story_link_date__long(shared_date, now) + story["shared_comments"] = strip_tags(shared_stories[story["story_hash"]]["comments"]) + if ( + shared_stories[story["story_hash"]]["shared_date"] < user.profile.unread_cutoff + or story["story_hash"] not in unread_feed_story_hashes + ): + story["read_status"] = 1 + + classifiers = sort_classifiers_by_feed( + user=user, + feed_ids=story_feed_ids, + classifier_feeds=classifier_feeds, + classifier_authors=classifier_authors, + classifier_titles=classifier_titles, + classifier_tags=classifier_tags, + ) diff = time.time() - start timediff = round(float(diff), 2) - logging.user(request, "~FY%sLoading ~FCriver ~FMblurblogs~FC stories~FY: ~SBp%s~SN (%s/%s " - "stories, ~SN%s/%s/%s feeds)" % - ("~FCAuto-" if on_dashboard else "", - page, len(stories), len(mstories), 
len(story_feed_ids), - len(social_user_ids), len(original_user_ids))) - - + logging.user( + request, + "~FY%sLoading ~FCriver ~FMblurblogs~FC stories~FY: ~SBp%s~SN (%s/%s " + "stories, ~SN%s/%s/%s feeds)" + % ( + "~FCAuto-" if on_dashboard else "", + page, + len(stories), + len(mstories), + len(story_feed_ids), + len(social_user_ids), + len(original_user_ids), + ), + ) + return { - "stories": stories, - "user_profiles": user_profiles, - "feeds": unsub_feeds, + "stories": stories, + "user_profiles": user_profiles, + "feeds": unsub_feeds, "classifiers": classifiers, "elapsed_time": timediff, } - + + def load_social_page(request, user_id, username=None, **kwargs): user = get_user(request.user) social_user_id = int(user_id) social_user = get_object_or_404(User, pk=social_user_id) - offset = int(request.GET.get('offset', 0)) - limit = int(request.GET.get('limit', 6)) + offset = int(request.GET.get("offset", 0)) + limit = int(request.GET.get("limit", 6)) try: - page = int(request.GET.get('page', 1)) + page = int(request.GET.get("page", 1)) except ValueError: page = 1 - format = request.GET.get('format', None) + format = request.GET.get("format", None) has_next_page = False - feed_id = kwargs.get('feed_id') or request.GET.get('feed_id') - if page: - offset = limit * (page-1) + feed_id = kwargs.get("feed_id") or request.GET.get("feed_id") + if page: + offset = limit * (page - 1) social_services = None user_social_profile = None user_social_services = None @@ -364,9 +436,9 @@ def load_social_page(request, user_id, username=None, **kwargs): user_social_services = MSocialServices.get_user(user.pk) user_following_social_profile = user_social_profile.is_following_user(social_user_id) social_profile = MSocialProfile.get_user(social_user_id) - - if username and '.dev' in username: - username = username.replace('.dev', '') + + if username and ".dev" in username: + username = username.replace(".dev", "") current_tab = "blurblogs" global_feed = False if username == "popular": @@ -374,39 +446,46 @@ def load_social_page(request, user_id, username=None, **kwargs): elif username == "popular.global": current_tab = "global" global_feed = True - - if social_profile.private and (not user.is_authenticated or - not social_profile.is_followed_by_user(user.pk)): + + if social_profile.private and ( + not user.is_authenticated or not social_profile.is_followed_by_user(user.pk) + ): stories = [] elif global_feed: - socialsubs = MSocialSubscription.objects.filter(user_id=relative_user_id) + socialsubs = MSocialSubscription.objects.filter(user_id=relative_user_id) social_user_ids = [s.subscription_user_id for s in socialsubs] - story_ids, story_dates, _ = MSocialSubscription.feed_stories(user.pk, social_user_ids, - offset=offset, limit=limit+1, - # order=order, read_filter=read_filter, - relative_user_id=relative_user_id, - cache=request.user.is_authenticated, - cutoff_date=user.profile.unread_cutoff) + story_ids, story_dates, _ = MSocialSubscription.feed_stories( + user.pk, + social_user_ids, + offset=offset, + limit=limit + 1, + # order=order, read_filter=read_filter, + relative_user_id=relative_user_id, + cache=request.user.is_authenticated, + cutoff_date=user.profile.unread_cutoff, + ) if len(story_ids) > limit: has_next_page = True story_ids = story_ids[:-1] mstories = MStory.find_by_story_hashes(story_ids) story_id_to_dates = dict(list(zip(story_ids, story_dates))) + def sort_stories_by_id(a, b): return int(story_id_to_dates[str(b.story_hash)]) - int(story_id_to_dates[str(a.story_hash)]) + sorted_mstories = 
sorted(mstories, key=sort_stories_by_id) stories = Feed.format_stories(sorted_mstories) for story in stories: - story['shared_date'] = story['story_date'] + story["shared_date"] = story["story_date"] else: params = dict(user_id=social_user.pk) if feed_id: - params['story_feed_id'] = feed_id - if 'story_db_id' in params: - params.pop('story_db_id') - mstories = MSharedStory.objects(**params).order_by('-shared_date')[offset:offset+limit+1] + params["story_feed_id"] = feed_id + if "story_db_id" in params: + params.pop("story_db_id") + mstories = MSharedStory.objects(**params).order_by("-shared_date")[offset : offset + limit + 1] stories = Feed.format_stories(mstories, include_permalinks=True) - + if len(stories) > limit: has_next_page = True stories = stories[:-1] @@ -419,181 +498,210 @@ def sort_stories_by_id(a, b): "social_user": social_user, "social_profile": social_profile, "user_social_services": user_social_services, - 'user_social_profile' : json.encode(user_social_profile and user_social_profile.page()), - 'user_following_social_profile': user_following_social_profile, + "user_social_profile": json.encode(user_social_profile and user_social_profile.page()), + "user_following_social_profile": user_following_social_profile, } - template = 'social/social_page.xhtml' + template = "social/social_page.xhtml" return render(request, template, params) - story_feed_ids = list(set(s['story_feed_id'] for s in stories)) + story_feed_ids = list(set(s["story_feed_id"] for s in stories)) feeds = Feed.objects.filter(pk__in=story_feed_ids) feeds = dict((feed.pk, feed.canonical(include_favicon=False)) for feed in feeds) for story in stories: - if story['story_feed_id'] in feeds: + if story["story_feed_id"] in feeds: # Feed could have been deleted. - story['feed'] = feeds[story['story_feed_id']] - shared_date = localtime_for_timezone(story['shared_date'], user.profile.timezone) - story['shared_date'] = shared_date - - stories, profiles = MSharedStory.stories_with_comments_and_profiles(stories, social_user.pk, - check_all=True) + story["feed"] = feeds[story["story_feed_id"]] + shared_date = localtime_for_timezone(story["shared_date"], user.profile.timezone) + story["shared_date"] = shared_date + + stories, profiles = MSharedStory.stories_with_comments_and_profiles( + stories, social_user.pk, check_all=True + ) if user.is_authenticated: for story in stories: - if user.pk in story['share_user_ids']: - story['shared_by_user'] = True - shared_story = MSharedStory.objects.hint([('story_hash', 1)])\ - .get(user_id=user.pk, - story_feed_id=story['story_feed_id'], - story_hash=story['story_hash']) - story['user_comments'] = shared_story.comments + if user.pk in story["share_user_ids"]: + story["shared_by_user"] = True + shared_story = MSharedStory.objects.hint([("story_hash", 1)]).get( + user_id=user.pk, story_feed_id=story["story_feed_id"], story_hash=story["story_hash"] + ) + story["user_comments"] = shared_story.comments stories = MSharedStory.attach_users_to_stories(stories, profiles) - + active_story = None - path = request.META['PATH_INFO'] - if '/story/' in path and format != 'html': + path = request.META["PATH_INFO"] + if "/story/" in path and format != "html": story_id = re.sub(r"^/story/.*?/(.*?)/?", "", path) - if not story_id or '/story' in story_id: - story_id = path.replace('/story/', '') + if not story_id or "/story" in story_id: + story_id = path.replace("/story/", "") social_services = MSocialServices.get_user(social_user.pk) - active_story_db = MSharedStory.objects.filter(user_id=social_user.pk, 
- story_hash=story_id)\ - .hint([('story_hash', 1)])\ - .limit(1) + active_story_db = ( + MSharedStory.objects.filter(user_id=social_user.pk, story_hash=story_id) + .hint([("story_hash", 1)]) + .limit(1) + ) if active_story_db: active_story_db = active_story_db[0] if user_social_profile.bb_permalink_direct: return HttpResponseRedirect(active_story_db.story_permalink) active_story = Feed.format_story(active_story_db) if active_story_db.image_count: - active_story['image_url'] = active_story_db.image_sizes[0]['src'] - active_story['tags'] = ', '.join(active_story_db.story_tags) - active_story['blurblog_permalink'] = active_story_db.blurblog_permalink() - active_story['iso8601'] = active_story_db.story_date.isoformat() - if active_story['story_feed_id']: - feed = Feed.get_by_id(active_story['story_feed_id']) + active_story["image_url"] = active_story_db.image_sizes[0]["src"] + active_story["tags"] = ", ".join(active_story_db.story_tags) + active_story["blurblog_permalink"] = active_story_db.blurblog_permalink() + active_story["iso8601"] = active_story_db.story_date.isoformat() + if active_story["story_feed_id"]: + feed = Feed.get_by_id(active_story["story_feed_id"]) if feed: - active_story['feed'] = feed.canonical() - + active_story["feed"] = feed.canonical() + params = { - 'social_user' : social_user, - 'stories' : stories, - 'user_social_profile' : user_social_profile, - 'user_social_profile_page' : json.encode(user_social_profile and user_social_profile.page()), - 'user_social_services' : user_social_services, - 'user_social_services_page' : json.encode(user_social_services and user_social_services.canonical()), - 'user_following_social_profile': user_following_social_profile, - 'social_profile': social_profile, - 'feeds' : feeds, - 'user_profile' : hasattr(user, 'profile') and user.profile, - 'has_next_page' : has_next_page, - 'holzer_truism' : random.choice(jennyholzer.TRUISMS), #if not has_next_page else None - 'facebook_app_id': settings.FACEBOOK_APP_ID, - 'active_story' : active_story, - 'current_tab' : current_tab, - 'social_services': social_services, + "social_user": social_user, + "stories": stories, + "user_social_profile": user_social_profile, + "user_social_profile_page": json.encode(user_social_profile and user_social_profile.page()), + "user_social_services": user_social_services, + "user_social_services_page": json.encode(user_social_services and user_social_services.canonical()), + "user_following_social_profile": user_following_social_profile, + "social_profile": social_profile, + "feeds": feeds, + "user_profile": hasattr(user, "profile") and user.profile, + "has_next_page": has_next_page, + "holzer_truism": random.choice(jennyholzer.TRUISMS), # if not has_next_page else None + "facebook_app_id": settings.FACEBOOK_APP_ID, + "active_story": active_story, + "current_tab": current_tab, + "social_services": social_services, } - logging.user(request, "~FYLoading ~FMsocial page~FY: ~SB%s%s ~FM%s/%s" % ( - social_profile.title[:22], ('~SN/p%s' % page) if page > 1 else '', - request.META.get('HTTP_USER_AGENT', "")[:40], - request.META.get('HTTP_X_FORWARDED_FOR', ""))) - if format == 'html': - template = 'social/social_stories.xhtml' + logging.user( + request, + "~FYLoading ~FMsocial page~FY: ~SB%s%s ~FM%s/%s" + % ( + social_profile.title[:22], + ("~SN/p%s" % page) if page > 1 else "", + request.META.get("HTTP_USER_AGENT", "")[:40], + request.META.get("HTTP_X_FORWARDED_FOR", ""), + ), + ) + if format == "html": + template = "social/social_stories.xhtml" else: - template = 
'social/social_page.xhtml' - + template = "social/social_page.xhtml" + return render(request, template, params) -@required_params('story_id', feed_id=int, method="GET") + +@required_params("story_id", feed_id=int, method="GET") def story_public_comments(request): - format = request.GET.get('format', 'json') - relative_user_id = request.GET.get('user_id', None) - feed_id = int(request.GET.get('feed_id')) - story_id = request.GET.get('story_id') - + format = request.GET.get("format", "json") + relative_user_id = request.GET.get("user_id", None) + feed_id = int(request.GET.get("feed_id")) + story_id = request.GET.get("story_id") + if not relative_user_id: relative_user_id = get_user(request).pk - + story, _ = MStory.find_story(story_feed_id=feed_id, story_id=story_id) if not story: - return json.json_response(request, { - 'message': "Story not found.", - 'code': -1, - }) - + return json.json_response( + request, + { + "message": "Story not found.", + "code": -1, + }, + ) + story = Feed.format_story(story) - stories, profiles = MSharedStory.stories_with_comments_and_profiles([story], - relative_user_id, - check_all=True) - - if format == 'html': + stories, profiles = MSharedStory.stories_with_comments_and_profiles( + [story], relative_user_id, check_all=True + ) + + if format == "html": stories = MSharedStory.attach_users_to_stories(stories, profiles) - return render(request, 'social/story_comments.xhtml', { - 'story': stories[0], - }) + return render( + request, + "social/story_comments.xhtml", + { + "story": stories[0], + }, + ) else: - return json.json_response(request, { - 'comments': stories[0]['public_comments'], - 'user_profiles': profiles, - }) + return json.json_response( + request, + { + "comments": stories[0]["public_comments"], + "user_profiles": profiles, + }, + ) + @ajax_login_required def mark_story_as_shared(request): - code = 1 - feed_id = int(request.POST['feed_id']) - story_id = request.POST['story_id'] - comments = request.POST.get('comments', '') - source_user_id = request.POST.get('source_user_id') - relative_user_id = request.POST.get('relative_user_id') or request.user.pk - post_to_services = request.POST.getlist('post_to_services') or request.POST.getlist('post_to_services[]') - format = request.POST.get('format', 'json') + code = 1 + feed_id = int(request.POST["feed_id"]) + story_id = request.POST["story_id"] + comments = request.POST.get("comments", "") + source_user_id = request.POST.get("source_user_id") + relative_user_id = request.POST.get("relative_user_id") or request.user.pk + post_to_services = request.POST.getlist("post_to_services") or request.POST.getlist("post_to_services[]") + format = request.POST.get("format", "json") now = datetime.datetime.now() nowtz = localtime_for_timezone(now, request.user.profile.timezone) - + MSocialProfile.get_user(request.user.pk) - + story, original_story_found = MStory.find_story(feed_id, story_id) if not story: - return json.json_response(request, { - 'code': -1, - 'message': 'Could not find the original story and no copies could be found.' - }) - + return json.json_response( + request, + {"code": -1, "message": "Could not find the original story and no copies could be found."}, + ) + feed = Feed.get_by_id(feed_id) if feed and feed.is_newsletter: - return json.json_response(request, { - 'code': -1, - 'message': 'You cannot share newsletters. Somebody could unsubscribe you!' 
- }) - - if not request.user.profile.is_premium and MSharedStory.feed_quota(request.user.pk, story.story_hash, feed_id=feed_id): - return json.json_response(request, { - 'code': -1, - 'message': 'Only premium users can share multiple stories per day from the same site.' - }) - + return json.json_response( + request, {"code": -1, "message": "You cannot share newsletters. Somebody could unsubscribe you!"} + ) + + if not request.user.profile.is_premium and MSharedStory.feed_quota( + request.user.pk, story.story_hash, feed_id=feed_id + ): + return json.json_response( + request, + { + "code": -1, + "message": "Only premium users can share multiple stories per day from the same site.", + }, + ) + quota = 100 if not request.user.profile.is_premium: quota = 3 if MSharedStory.feed_quota(request.user.pk, story.story_hash, quota=quota): - logging.user(request, "~FRNOT ~FCSharing ~FM%s~FC, over quota: ~SB~FB%s" % (story.story_title[:20], comments[:30])) - message = 'You can only share up to %s stories per day.' % quota + logging.user( + request, + "~FRNOT ~FCSharing ~FM%s~FC, over quota: ~SB~FB%s" % (story.story_title[:20], comments[:30]), + ) + message = "You can only share up to %s stories per day." % quota if not request.user.profile.is_premium: - message = 'You can only share up to %s stories per day as a free user. Upgrade to premium to share more.' % quota - return json.json_response(request, { - 'code': -1, - 'message': message - }) - - shared_story = MSharedStory.objects.filter(user_id=request.user.pk, - story_feed_id=feed_id, - story_hash=story['story_hash'])\ - .hint([('story_hash', 1)])\ - .limit(1).first() + message = ( + "You can only share up to %s stories per day as a free user. Upgrade to premium to share more." + % quota + ) + return json.json_response(request, {"code": -1, "message": message}) + + shared_story = ( + MSharedStory.objects.filter( + user_id=request.user.pk, story_feed_id=feed_id, story_hash=story["story_hash"] + ) + .hint([("story_hash", 1)]) + .limit(1) + .first() + ) if not shared_story: story_db = { "story_guid": story.story_guid, @@ -601,7 +709,7 @@ def mark_story_as_shared(request): "story_permalink": story.story_permalink, "story_title": story.story_title, "story_feed_id": story.story_feed_id, - "story_content_z": getattr(story, 'story_latest_content_z', None) or story.story_content_z, + "story_content_z": getattr(story, "story_latest_content_z", None) or story.story_content_z, "story_author_name": story.story_author_name, "story_tags": story.story_tags, "story_date": story.story_date, @@ -613,160 +721,198 @@ def mark_story_as_shared(request): shared_story = MSharedStory.objects.create(**story_db) shared_story.publish_to_subscribers() except NotUniqueError: - shared_story = MSharedStory.objects.get(story_guid=story_db['story_guid'], - user_id=story_db['user_id']) + shared_story = MSharedStory.objects.get( + story_guid=story_db["story_guid"], user_id=story_db["user_id"] + ) except MSharedStory.DoesNotExist: - return json.json_response(request, { - 'code': -1, - 'message': 'Story already shared but then not shared. I don\'t really know. Did you submit this twice very quickly?' - }) + return json.json_response( + request, + { + "code": -1, + "message": "Story already shared but then not shared. I don't really know. 
Did you submit this twice very quickly?", + }, + ) if source_user_id: shared_story.set_source_user_id(int(source_user_id)) - UpdateRecalcForSubscription.delay(subscription_user_id=request.user.pk, - shared_story_id=str(shared_story.id)) + UpdateRecalcForSubscription.delay( + subscription_user_id=request.user.pk, shared_story_id=str(shared_story.id) + ) logging.user(request, "~FCSharing ~FM%s: ~SB~FB%s" % (story.story_title[:20], comments[:30])) else: shared_story.comments = comments shared_story.has_comments = bool(comments) shared_story.save() - logging.user(request, "~FCUpdating shared story ~FM%s: ~SB~FB%s" % ( - story.story_title[:20], comments[:30])) - + logging.user( + request, "~FCUpdating shared story ~FM%s: ~SB~FB%s" % (story.story_title[:20], comments[:30]) + ) + if original_story_found: story.count_comments() - + story = Feed.format_story(story) check_all = not original_story_found - stories, profiles = MSharedStory.stories_with_comments_and_profiles([story], relative_user_id, - check_all=check_all) + stories, profiles = MSharedStory.stories_with_comments_and_profiles( + [story], relative_user_id, check_all=check_all + ) story = stories[0] - starred_stories = MStarredStory.objects(user_id=request.user.pk, - story_feed_id=story['story_feed_id'], - story_hash=story['story_hash'])\ - .only('story_hash', 'starred_date', 'user_tags').limit(1) + starred_stories = ( + MStarredStory.objects( + user_id=request.user.pk, story_feed_id=story["story_feed_id"], story_hash=story["story_hash"] + ) + .only("story_hash", "starred_date", "user_tags") + .limit(1) + ) if starred_stories: - story['user_tags'] = starred_stories[0]['user_tags'] - story['starred'] = True - starred_date = localtime_for_timezone(starred_stories[0]['starred_date'], - request.user.profile.timezone) - story['starred_date'] = format_story_link_date__long(starred_date, now) - story['shared_comments'] = strip_tags(shared_story['comments'] or "") - story['shared_by_user'] = True - story['shared'] = True - shared_date = localtime_for_timezone(shared_story['shared_date'], request.user.profile.timezone) - story['short_parsed_date'] = format_story_link_date__short(shared_date, nowtz) - story['long_parsed_date'] = format_story_link_date__long(shared_date, nowtz) - + story["user_tags"] = starred_stories[0]["user_tags"] + story["starred"] = True + starred_date = localtime_for_timezone( + starred_stories[0]["starred_date"], request.user.profile.timezone + ) + story["starred_date"] = format_story_link_date__long(starred_date, now) + story["shared_comments"] = strip_tags(shared_story["comments"] or "") + story["shared_by_user"] = True + story["shared"] = True + shared_date = localtime_for_timezone(shared_story["shared_date"], request.user.profile.timezone) + story["short_parsed_date"] = format_story_link_date__short(shared_date, nowtz) + story["long_parsed_date"] = format_story_link_date__long(shared_date, nowtz) + if post_to_services: for service in post_to_services: if service not in shared_story.posted_to_services: PostToService.delay(shared_story_id=str(shared_story.id), service=service) - + if shared_story.source_user_id and shared_story.comments: - EmailStoryReshares.apply_async(kwargs=dict(shared_story_id=str(shared_story.id)), - countdown=settings.SECONDS_TO_DELAY_CELERY_EMAILS) - + EmailStoryReshares.apply_async( + kwargs=dict(shared_story_id=str(shared_story.id)), + countdown=settings.SECONDS_TO_DELAY_CELERY_EMAILS, + ) + EmailFirstShare.apply_async(kwargs=dict(user_id=request.user.pk)) - - if format == 'html': + if format == 
"html": stories = MSharedStory.attach_users_to_stories(stories, profiles) - return render(request, 'social/social_story.xhtml', { - 'story': story, - }) + return render( + request, + "social/social_story.xhtml", + { + "story": story, + }, + ) else: - return json.json_response(request, { - 'code': code, - 'story': story, - 'user_profiles': profiles, - }) + return json.json_response( + request, + { + "code": code, + "story": story, + "user_profiles": profiles, + }, + ) + @ajax_login_required def mark_story_as_unshared(request): - feed_id = int(request.POST['feed_id']) - story_id = request.POST['story_id'] - relative_user_id = request.POST.get('relative_user_id') or request.user.pk - format = request.POST.get('format', 'json') + feed_id = int(request.POST["feed_id"]) + story_id = request.POST["story_id"] + relative_user_id = request.POST.get("relative_user_id") or request.user.pk + format = request.POST.get("format", "json") original_story_found = True - - story, original_story_found = MStory.find_story(story_feed_id=feed_id, - story_id=story_id) - - shared_story = MSharedStory.objects(user_id=request.user.pk, - story_feed_id=feed_id, - story_hash=story['story_hash']).limit(1).first() + + story, original_story_found = MStory.find_story(story_feed_id=feed_id, story_id=story_id) + + shared_story = ( + MSharedStory.objects(user_id=request.user.pk, story_feed_id=feed_id, story_hash=story["story_hash"]) + .limit(1) + .first() + ) if not shared_story: - return json.json_response(request, {'code': -1, 'message': 'Shared story not found.'}) - + return json.json_response(request, {"code": -1, "message": "Shared story not found."}) + shared_story.unshare_story() - + if original_story_found: story.count_comments() else: story = shared_story - + story = Feed.format_story(story) - stories, profiles = MSharedStory.stories_with_comments_and_profiles([story], - relative_user_id, - check_all=True) + stories, profiles = MSharedStory.stories_with_comments_and_profiles( + [story], relative_user_id, check_all=True + ) - if format == 'html': + if format == "html": stories = MSharedStory.attach_users_to_stories(stories, profiles) - return render(request, 'social/social_story.xhtml', { - 'story': stories[0], - }) + return render( + request, + "social/social_story.xhtml", + { + "story": stories[0], + }, + ) else: - return json.json_response(request, { - 'code': 1, - 'message': "Story unshared.", - 'story': stories[0], - 'user_profiles': profiles, - }) - + return json.json_response( + request, + { + "code": 1, + "message": "Story unshared.", + "story": stories[0], + "user_profiles": profiles, + }, + ) + + @ajax_login_required def save_comment_reply(request): - code = 1 - feed_id = int(request.POST['story_feed_id']) - story_id = request.POST['story_id'] - comment_user_id = request.POST['comment_user_id'] - reply_comments = request.POST.get('reply_comments') - reply_id = request.POST.get('reply_id') - format = request.POST.get('format', 'json') + code = 1 + feed_id = int(request.POST["story_feed_id"]) + story_id = request.POST["story_id"] + comment_user_id = request.POST["comment_user_id"] + reply_comments = request.POST.get("reply_comments") + reply_id = request.POST.get("reply_id") + format = request.POST.get("format", "json") original_message = None - + if not reply_comments: - return json.json_response(request, { - 'code': -1, - 'message': 'Reply comments cannot be empty.', - }) - + return json.json_response( + request, + { + "code": -1, + "message": "Reply comments cannot be empty.", + }, + ) + commenter_profile = 
MSocialProfile.get_user(comment_user_id) if commenter_profile.protected and not commenter_profile.is_followed_by_user(request.user.pk): - return json.json_response(request, { - 'code': -1, - 'message': 'You must be following %s to reply to them.' % (commenter_profile.user.username if commenter_profile.user else "[deleted]"), - }) - + return json.json_response( + request, + { + "code": -1, + "message": "You must be following %s to reply to them." + % (commenter_profile.user.username if commenter_profile.user else "[deleted]"), + }, + ) + try: - shared_story = MSharedStory.objects.get(user_id=comment_user_id, - story_feed_id=feed_id, - story_guid=story_id) + shared_story = MSharedStory.objects.get( + user_id=comment_user_id, story_feed_id=feed_id, story_guid=story_id + ) except MSharedStory.DoesNotExist: - return json.json_response(request, { - 'code': -1, - 'message': 'Shared story cannot be found.', - }) - + return json.json_response( + request, + { + "code": -1, + "message": "Shared story cannot be found.", + }, + ) + reply = MCommentReply() reply.user_id = request.user.pk reply.publish_date = datetime.datetime.now() reply.comments = reply_comments - + if reply_id: replies = [] for story_reply in shared_story.replies: - if (story_reply.user_id == reply.user_id and - story_reply.reply_id == ObjectId(reply_id)): + if story_reply.user_id == reply.user_id and story_reply.reply_id == ObjectId(reply_id): reply.publish_date = story_reply.publish_date reply.reply_id = story_reply.reply_id original_message = story_reply.comments @@ -774,80 +920,96 @@ def save_comment_reply(request): else: replies.append(story_reply) shared_story.replies = replies - logging.user(request, "~FCUpdating comment reply in ~FM%s: ~SB~FB%s~FM" % ( - shared_story.story_title[:20], reply_comments[:30])) + logging.user( + request, + "~FCUpdating comment reply in ~FM%s: ~SB~FB%s~FM" + % (shared_story.story_title[:20], reply_comments[:30]), + ) else: reply.reply_id = ObjectId() - logging.user(request, "~FCReplying to comment in: ~FM%s: ~SB~FB%s~FM" % ( - shared_story.story_title[:20], reply_comments[:30])) + logging.user( + request, + "~FCReplying to comment in: ~FM%s: ~SB~FB%s~FM" + % (shared_story.story_title[:20], reply_comments[:30]), + ) shared_story.replies.append(reply) shared_story.save() - + comment, profiles = shared_story.comment_with_author_and_profiles() - + # Interaction for every other replier and original commenter - MActivity.new_comment_reply(user_id=request.user.pk, - comment_user_id=comment['user_id'], - reply_content=reply_comments, - original_message=original_message, - story_id=story_id, - story_feed_id=feed_id, - story_title=shared_story.story_title) - if comment['user_id'] != request.user.pk: - MInteraction.new_comment_reply(user_id=comment['user_id'], - reply_user_id=request.user.pk, - reply_content=reply_comments, - original_message=original_message, - story_id=story_id, - story_feed_id=feed_id, - story_title=shared_story.story_title) - - reply_user_ids = list(r['user_id'] for r in comment['replies']) - for user_id in set(reply_user_ids).difference([comment['user_id']]): + MActivity.new_comment_reply( + user_id=request.user.pk, + comment_user_id=comment["user_id"], + reply_content=reply_comments, + original_message=original_message, + story_id=story_id, + story_feed_id=feed_id, + story_title=shared_story.story_title, + ) + if comment["user_id"] != request.user.pk: + MInteraction.new_comment_reply( + user_id=comment["user_id"], + reply_user_id=request.user.pk, + reply_content=reply_comments, + 
original_message=original_message, + story_id=story_id, + story_feed_id=feed_id, + story_title=shared_story.story_title, + ) + + reply_user_ids = list(r["user_id"] for r in comment["replies"]) + for user_id in set(reply_user_ids).difference([comment["user_id"]]): if request.user.pk != user_id: - MInteraction.new_reply_reply(user_id=user_id, - comment_user_id=comment['user_id'], - reply_user_id=request.user.pk, - reply_content=reply_comments, - original_message=original_message, - story_id=story_id, - story_feed_id=feed_id, - story_title=shared_story.story_title) - - EmailCommentReplies.apply_async(kwargs=dict(shared_story_id=str(shared_story.id), - reply_id=str(reply.reply_id)), - countdown=settings.SECONDS_TO_DELAY_CELERY_EMAILS) - - if format == 'html': + MInteraction.new_reply_reply( + user_id=user_id, + comment_user_id=comment["user_id"], + reply_user_id=request.user.pk, + reply_content=reply_comments, + original_message=original_message, + story_id=story_id, + story_feed_id=feed_id, + story_title=shared_story.story_title, + ) + + EmailCommentReplies.apply_async( + kwargs=dict(shared_story_id=str(shared_story.id), reply_id=str(reply.reply_id)), + countdown=settings.SECONDS_TO_DELAY_CELERY_EMAILS, + ) + + if format == "html": comment = MSharedStory.attach_users_to_comment(comment, profiles) - return render(request, 'social/story_comment.xhtml', { - 'comment': comment, - }) + return render( + request, + "social/story_comment.xhtml", + { + "comment": comment, + }, + ) else: - return json.json_response(request, { - 'code': code, - 'comment': comment, - 'reply_id': reply.reply_id, - 'user_profiles': profiles - }) + return json.json_response( + request, {"code": code, "comment": comment, "reply_id": reply.reply_id, "user_profiles": profiles} + ) + @ajax_login_required def remove_comment_reply(request): - code = 1 - feed_id = int(request.POST['story_feed_id']) - story_id = request.POST['story_id'] - comment_user_id = request.POST['comment_user_id'] - reply_id = request.POST.get('reply_id') - format = request.POST.get('format', 'json') + code = 1 + feed_id = int(request.POST["story_feed_id"]) + story_id = request.POST["story_id"] + comment_user_id = request.POST["comment_user_id"] + reply_id = request.POST.get("reply_id") + format = request.POST.get("format", "json") original_message = None - - shared_story = MSharedStory.objects.get(user_id=comment_user_id, - story_feed_id=feed_id, - story_guid=story_id) + + shared_story = MSharedStory.objects.get( + user_id=comment_user_id, story_feed_id=feed_id, story_guid=story_id + ) replies = [] for story_reply in shared_story.replies: - if ((story_reply.user_id == request.user.pk or request.user.is_staff) and - story_reply.reply_id == ObjectId(reply_id)): + if ( + story_reply.user_id == request.user.pk or request.user.is_staff + ) and story_reply.reply_id == ObjectId(reply_id): original_message = story_reply.comments # Skip reply else: @@ -855,53 +1017,64 @@ def remove_comment_reply(request): shared_story.replies = replies shared_story.save() - logging.user(request, "~FCRemoving comment reply in ~FM%s: ~SB~FB%s~FM" % ( - shared_story.story_title[:20], original_message and original_message[:30])) - + logging.user( + request, + "~FCRemoving comment reply in ~FM%s: ~SB~FB%s~FM" + % (shared_story.story_title[:20], original_message and original_message[:30]), + ) + comment, profiles = shared_story.comment_with_author_and_profiles() # Interaction for every other replier and original commenter - MActivity.remove_comment_reply(user_id=request.user.pk, - 
comment_user_id=comment['user_id'], - reply_content=original_message, - story_id=story_id, - story_feed_id=feed_id) - MInteraction.remove_comment_reply(user_id=comment['user_id'], - reply_user_id=request.user.pk, - reply_content=original_message, - story_id=story_id, - story_feed_id=feed_id) - - reply_user_ids = [reply['user_id'] for reply in comment['replies']] - for user_id in set(reply_user_ids).difference([comment['user_id']]): + MActivity.remove_comment_reply( + user_id=request.user.pk, + comment_user_id=comment["user_id"], + reply_content=original_message, + story_id=story_id, + story_feed_id=feed_id, + ) + MInteraction.remove_comment_reply( + user_id=comment["user_id"], + reply_user_id=request.user.pk, + reply_content=original_message, + story_id=story_id, + story_feed_id=feed_id, + ) + + reply_user_ids = [reply["user_id"] for reply in comment["replies"]] + for user_id in set(reply_user_ids).difference([comment["user_id"]]): if request.user.pk != user_id: - MInteraction.remove_reply_reply(user_id=user_id, - comment_user_id=comment['user_id'], - reply_user_id=request.user.pk, - reply_content=original_message, - story_id=story_id, - story_feed_id=feed_id) - - if format == 'html': + MInteraction.remove_reply_reply( + user_id=user_id, + comment_user_id=comment["user_id"], + reply_user_id=request.user.pk, + reply_content=original_message, + story_id=story_id, + story_feed_id=feed_id, + ) + + if format == "html": comment = MSharedStory.attach_users_to_comment(comment, profiles) - return render(request, 'social/story_comment.xhtml', { - 'comment': comment, - }) + return render( + request, + "social/story_comment.xhtml", + { + "comment": comment, + }, + ) else: - return json.json_response(request, { - 'code': code, - 'comment': comment, - 'user_profiles': profiles - }) - -@render_to('social/mute_story.xhtml') + return json.json_response(request, {"code": code, "comment": comment, "user_profiles": profiles}) + + +@render_to("social/mute_story.xhtml") def mute_story(request, secret_token, shared_story_id): user_profile = Profile.objects.get(secret_token=secret_token) shared_story = MSharedStory.objects.get(id=shared_story_id) shared_story.mute_for_user(user_profile.user_id) - + return {} - + + def shared_stories_public(request, username): try: user = User.objects.get(username=username) @@ -909,50 +1082,59 @@ def shared_stories_public(request, username): raise Http404 shared_stories = MSharedStory.objects.filter(user_id=user.pk) - + return HttpResponse("There are %s stories shared by %s." 
% (shared_stories.count(), username)) - + + @json.json_view def profile(request): user = get_user(request.user) - user_id = int(request.GET.get('user_id', user.pk)) - categories = request.GET.getlist('category') or request.GET.getlist('category[]') - include_activities_html = request.GET.get('include_activities_html', None) + user_id = int(request.GET.get("user_id", user.pk)) + categories = request.GET.getlist("category") or request.GET.getlist("category[]") + include_activities_html = request.GET.get("include_activities_html", None) user_profile = MSocialProfile.get_user(user_id) user_profile.count_follows() - + activities = [] if not user_profile.private or user_profile.is_followed_by_user(user.pk): activities, _ = MActivity.user(user_id, page=1, public=True, categories=categories) user_profile = user_profile.canonical(include_follows=True, common_follows_with_user=user.pk) - profile_ids = set(user_profile['followers_youknow'] + user_profile['followers_everybody'] + - user_profile['following_youknow'] + user_profile['following_everybody']) + profile_ids = set( + user_profile["followers_youknow"] + + user_profile["followers_everybody"] + + user_profile["following_youknow"] + + user_profile["following_everybody"] + ) profiles = MSocialProfile.profiles(profile_ids) - logging.user(request, "~BB~FRLoading social profile: %s" % user_profile['username']) - + logging.user(request, "~BB~FRLoading social profile: %s" % user_profile["username"]) + payload = { - 'user_profile': user_profile, - 'followers_youknow': user_profile['followers_youknow'], - 'followers_everybody': user_profile['followers_everybody'], - 'following_youknow': user_profile['following_youknow'], - 'following_everybody': user_profile['following_everybody'], - 'requested_follow': user_profile['requested_follow'], - 'profiles': dict([(p.user_id, p.canonical(compact=True)) for p in profiles]), - 'activities': activities, + "user_profile": user_profile, + "followers_youknow": user_profile["followers_youknow"], + "followers_everybody": user_profile["followers_everybody"], + "following_youknow": user_profile["following_youknow"], + "following_everybody": user_profile["following_everybody"], + "requested_follow": user_profile["requested_follow"], + "profiles": dict([(p.user_id, p.canonical(compact=True)) for p in profiles]), + "activities": activities, } if include_activities_html: - payload['activities_html'] = render_to_string('reader/activities_module.xhtml', { - 'activities': activities, - 'username': user_profile['username'], - 'public': True, - }) - + payload["activities_html"] = render_to_string( + "reader/activities_module.xhtml", + { + "activities": activities, + "username": user_profile["username"], + "public": True, + }, + ) + return payload + @ajax_login_required @json.json_view def load_user_profile(request): @@ -961,43 +1143,44 @@ def load_user_profile(request): social_services = MSocialServices.objects.get(user_id=request.user.pk) except MSocialServices.DoesNotExist: social_services = MSocialServices.objects.create(user_id=request.user.pk) - + logging.user(request, "~BB~FRLoading social profile and blurblog settings") - + return { - 'services': social_services, - 'user_profile': social_profile.canonical(include_follows=True, include_settings=True), + "services": social_services, + "user_profile": social_profile.canonical(include_follows=True, include_settings=True), } - + + @ajax_login_required @json.json_view def save_user_profile(request): data = request.POST - website = data['website'] - - if website and not 
website.startswith('http'): - website = 'http://' + website - + website = data["website"] + + if website and not website.startswith("http"): + website = "http://" + website + profile = MSocialProfile.get_user(request.user.pk) - profile.location = data['location'] - profile.bio = data['bio'] + profile.location = data["location"] + profile.bio = data["bio"] profile.website = website - profile.protected = is_true(data.get('protected', False)) - profile.private = is_true(data.get('private', False)) + profile.protected = is_true(data.get("protected", False)) + profile.private = is_true(data.get("private", False)) profile.save() social_services = MSocialServices.get_user(user_id=request.user.pk) - profile = social_services.set_photo(data['photo_service']) - + profile = social_services.set_photo(data["photo_service"]) + logging.user(request, "~BB~FRSaving social profile") - + return dict(code=1, user_profile=profile.canonical(include_follows=True)) @ajax_login_required @json.json_view def upload_avatar(request): - photo = request.FILES['photo'] + photo = request.FILES["photo"] profile = MSocialProfile.get_user(request.user.pk) social_services = MSocialServices.objects.get(user_id=request.user.pk) @@ -1005,7 +1188,7 @@ def upload_avatar(request): image_url = social_services.save_uploaded_photo(photo) if image_url: - profile = social_services.set_photo('upload') + profile = social_services.set_photo("upload") return { "code": 1 if image_url else -1, @@ -1014,22 +1197,24 @@ def upload_avatar(request): "user_profile": profile.canonical(include_follows=True), } + @ajax_login_required @json.json_view def save_blurblog_settings(request): data = request.POST profile = MSocialProfile.get_user(request.user.pk) - profile.custom_css = strip_tags(data.get('custom_css', None)) - profile.custom_bgcolor = strip_tags(data.get('custom_bgcolor', None)) - profile.blurblog_title = strip_tags(data.get('blurblog_title', None)) - profile.bb_permalink_direct = is_true(data.get('bb_permalink_direct', False)) + profile.custom_css = strip_tags(data.get("custom_css", None)) + profile.custom_bgcolor = strip_tags(data.get("custom_bgcolor", None)) + profile.blurblog_title = strip_tags(data.get("blurblog_title", None)) + profile.bb_permalink_direct = is_true(data.get("bb_permalink_direct", False)) profile.save() logging.user(request, "~BB~FRSaving blurblog settings") - + return dict(code=1, user_profile=profile.canonical(include_follows=True, include_settings=True)) + @json.json_view def load_follow_requests(request): user = get_user(request.user) @@ -1039,53 +1224,57 @@ def load_follow_requests(request): request_profiles = [p.canonical(include_following_user=user.pk) for p in request_profiles] if len(request_profiles): - logging.user(request, "~BB~FRLoading Follow Requests (%s requests)" % ( - len(request_profiles), - )) + logging.user(request, "~BB~FRLoading Follow Requests (%s requests)" % (len(request_profiles),)) return { - 'request_profiles': request_profiles, + "request_profiles": request_profiles, } + @ratelimit(minutes=1, requests=100) @json.json_view def load_user_friends(request): user = get_user(request.user) - social_profile = MSocialProfile.get_user(user_id=user.pk) - social_services = MSocialServices.get_user(user_id=user.pk) + social_profile = MSocialProfile.get_user(user_id=user.pk) + social_services = MSocialServices.get_user(user_id=user.pk) following_profiles = MSocialProfile.profiles(social_profile.following_user_ids) - follower_profiles = MSocialProfile.profiles(social_profile.follower_user_ids) - 
recommended_users = social_profile.recommended_users() + follower_profiles = MSocialProfile.profiles(social_profile.follower_user_ids) + recommended_users = social_profile.recommended_users() following_profiles = [p.canonical(include_following_user=user.pk) for p in following_profiles] - follower_profiles = [p.canonical(include_following_user=user.pk) for p in follower_profiles] - - logging.user(request, "~BB~FRLoading Friends (%s following, %s followers)" % ( - social_profile.following_count, - social_profile.follower_count, - )) + follower_profiles = [p.canonical(include_following_user=user.pk) for p in follower_profiles] + + logging.user( + request, + "~BB~FRLoading Friends (%s following, %s followers)" + % ( + social_profile.following_count, + social_profile.follower_count, + ), + ) return { - 'services': social_services, - 'autofollow': social_services.autofollow, - 'user_profile': social_profile.canonical(include_follows=True), - 'following_profiles': following_profiles, - 'follower_profiles': follower_profiles, - 'recommended_users': recommended_users, + "services": social_services, + "autofollow": social_services.autofollow, + "user_profile": social_profile.canonical(include_follows=True), + "following_profiles": following_profiles, + "follower_profiles": follower_profiles, + "recommended_users": recommended_users, } + @ajax_login_required @json.json_view def follow(request): profile = MSocialProfile.get_user(request.user.pk) - user_id = request.POST['user_id'] + user_id = request.POST["user_id"] try: follow_user_id = int(user_id) except ValueError: try: - follow_user_id = int(user_id.replace('social:', '')) + follow_user_id = int(user_id.replace("social:", "")) follow_profile = MSocialProfile.get_user(follow_user_id) except (ValueError, MSocialProfile.DoesNotExist): - follow_username = user_id.replace('social:', '') + follow_username = user_id.replace("social:", "") try: follow_profile = MSocialProfile.objects.get(username=follow_username) except MSocialProfile.DoesNotExist: @@ -1094,54 +1283,55 @@ def follow(request): profile.follow_user(follow_user_id) follow_profile = MSocialProfile.get_user(follow_user_id) - + social_params = { - 'user_id': request.user.pk, - 'subscription_user_id': follow_user_id, - 'include_favicon': True, - 'update_counts': True, + "user_id": request.user.pk, + "subscription_user_id": follow_user_id, + "include_favicon": True, + "update_counts": True, } follow_subscription = MSocialSubscription.feeds(calculate_all_scores=True, **social_params) - + if follow_profile.user: if follow_profile.protected: logging.user(request, "~BB~FR~SBRequested~SN follow from: ~SB%s" % follow_profile.user.username) else: logging.user(request, "~BB~FRFollowing: ~SB%s" % follow_profile.user.username) - + return { - "user_profile": profile.canonical(include_follows=True), + "user_profile": profile.canonical(include_follows=True), "follow_profile": follow_profile.canonical(common_follows_with_user=request.user.pk), "follow_subscription": follow_subscription, } - + + @ajax_login_required @json.json_view def unfollow(request): profile = MSocialProfile.get_user(request.user.pk) - user_id = request.POST['user_id'] + user_id = request.POST["user_id"] try: unfollow_user_id = int(user_id) except ValueError: try: - unfollow_user_id = int(user_id.replace('social:', '')) + unfollow_user_id = int(user_id.replace("social:", "")) unfollow_profile = MSocialProfile.get_user(unfollow_user_id) except (ValueError, MSocialProfile.DoesNotExist): - unfollow_username = user_id.replace('social:', '') + 
unfollow_username = user_id.replace("social:", "") try: unfollow_profile = MSocialProfile.objects.get(username=unfollow_username) except MSocialProfile.DoesNotExist: raise Http404 unfollow_user_id = unfollow_profile.user_id - + profile.unfollow_user(unfollow_user_id) unfollow_profile = MSocialProfile.get_user(unfollow_user_id) - + logging.user(request, "~BB~FRUnfollowing: ~SB%s" % unfollow_profile.username) - + return { - 'user_profile': profile.canonical(include_follows=True), - 'unfollow_profile': unfollow_profile.canonical(common_follows_with_user=request.user.pk), + "user_profile": profile.canonical(include_follows=True), + "unfollow_profile": unfollow_profile.canonical(common_follows_with_user=request.user.pk), } @@ -1149,80 +1339,84 @@ def unfollow(request): @json.json_view def approve_follower(request): profile = MSocialProfile.get_user(request.user.pk) - user_id = int(request.POST['user_id']) + user_id = int(request.POST["user_id"]) follower_profile = MSocialProfile.get_user(user_id) code = -1 - + logging.user(request, "~BB~FRApproving follow: ~SB%s" % follower_profile.username) - + if user_id in profile.requested_follow_user_ids: follower_profile.follow_user(request.user.pk, force=True) code = 1 - - return {'code': code} + + return {"code": code} + @ajax_login_required @json.json_view def ignore_follower(request): profile = MSocialProfile.get_user(request.user.pk) - user_id = int(request.POST['user_id']) + user_id = int(request.POST["user_id"]) follower_profile = MSocialProfile.get_user(user_id) code = -1 - + logging.user(request, "~BB~FR~SK~SBNOT~SN approving follow: ~SB%s" % follower_profile.username) - + if user_id in profile.requested_follow_user_ids: follower_profile.unfollow_user(request.user.pk) code = 1 - - return {'code': code} + + return {"code": code} + @ajax_login_required -@required_params('user_id', method="POST") +@required_params("user_id", method="POST") @json.json_view def mute_user(request): profile = MSocialProfile.get_user(request.user.pk) - muting_user_id = int(request.POST['user_id']) + muting_user_id = int(request.POST["user_id"]) social_profile = MSocialProfile.get_user(request.user.pk) muting_profile = MSocialProfile.get_user(muting_user_id) code = 1 - + logging.user(request, "~FMMuting user ~SB%s" % muting_profile.username) - + social_profile.mute_user(muting_user_id) - + return { - 'code': code, - 'user_profile': social_profile.canonical(), + "code": code, + "user_profile": social_profile.canonical(), } + @ajax_login_required -@required_params('user_id', method="POST") +@required_params("user_id", method="POST") @json.json_view def unmute_user(request): profile = MSocialProfile.get_user(request.user.pk) - muting_user_id = int(request.POST['user_id']) + muting_user_id = int(request.POST["user_id"]) muting_profile = MSocialProfile.get_user(muting_user_id) code = 1 - + logging.user(request, "~FM~SBUn-~SN~FMMuting user ~SB%s" % muting_profile.username) - + profile.unmute_user(muting_user_id) - + return { - 'code': code, - 'user_profile': profile.canonical(), + "code": code, + "user_profile": profile.canonical(), } -@required_params('query', method="GET") + +@required_params("query", method="GET") @json.json_view def find_friends(request): - query = request.GET['query'] - limit = int(request.GET.get('limit', 3)) + query = request.GET["query"] + limit = int(request.GET.get("limit", 3)) profiles = [] - - if '@' in query: - results = re.search(r'[\w\.-]+@[\w\.-]+', query) + + if "@" in query: + results = re.search(r"[\w\.-]+@[\w\.-]+", query) if results: email 
= results.group(0) profiles = MSocialProfile.objects.filter(email__iexact=email)[:limit] @@ -1238,110 +1432,141 @@ def find_friends(request): profiles = MSocialProfile.objects.filter(blurblog_title__icontains=query)[:limit] if not profiles: profiles = MSocialProfile.objects.filter(location__icontains=query)[:limit] - + profiles = [p.canonical(include_following_user=request.user.pk) for p in profiles] - profiles = sorted(profiles, key=lambda p: -1 * p['shared_stories_count']) + profiles = sorted(profiles, key=lambda p: -1 * p["shared_stories_count"]) return dict(profiles=profiles) + @ajax_login_required def like_comment(request): - code = 1 - feed_id = int(request.POST['story_feed_id']) - story_id = request.POST['story_id'] - comment_user_id = int(request.POST['comment_user_id']) - format = request.POST.get('format', 'json') - + code = 1 + feed_id = int(request.POST["story_feed_id"]) + story_id = request.POST["story_id"] + comment_user_id = int(request.POST["comment_user_id"]) + format = request.POST.get("format", "json") + if comment_user_id == request.user.pk: - return json.json_response(request, {'code': -1, 'message': 'You cannot favorite your own shared story comment.'}) + return json.json_response( + request, {"code": -1, "message": "You cannot favorite your own shared story comment."} + ) try: - shared_story = MSharedStory.objects.get(user_id=comment_user_id, - story_feed_id=feed_id, - story_guid=story_id) + shared_story = MSharedStory.objects.get( + user_id=comment_user_id, story_feed_id=feed_id, story_guid=story_id + ) except MSharedStory.DoesNotExist: - return json.json_response(request, {'code': -1, 'message': 'The shared comment cannot be found.'}) - + return json.json_response(request, {"code": -1, "message": "The shared comment cannot be found."}) + shared_story.add_liking_user(request.user.pk) comment, profiles = shared_story.comment_with_author_and_profiles() comment_user = User.objects.get(pk=shared_story.user_id) - logging.user(request, "~BB~FMLiking comment by ~SB%s~SN: %s" % ( - comment_user.username, - shared_story.comments[:30], - )) - - MActivity.new_comment_like(liking_user_id=request.user.pk, - comment_user_id=comment['user_id'], - story_id=story_id, - story_feed_id=feed_id, - story_title=shared_story.story_title, - comments=shared_story.comments) - MInteraction.new_comment_like(liking_user_id=request.user.pk, - comment_user_id=comment['user_id'], - story_id=story_id, - story_feed_id=feed_id, - story_title=shared_story.story_title, - comments=shared_story.comments) - - if format == 'html': + logging.user( + request, + "~BB~FMLiking comment by ~SB%s~SN: %s" + % ( + comment_user.username, + shared_story.comments[:30], + ), + ) + + MActivity.new_comment_like( + liking_user_id=request.user.pk, + comment_user_id=comment["user_id"], + story_id=story_id, + story_feed_id=feed_id, + story_title=shared_story.story_title, + comments=shared_story.comments, + ) + MInteraction.new_comment_like( + liking_user_id=request.user.pk, + comment_user_id=comment["user_id"], + story_id=story_id, + story_feed_id=feed_id, + story_title=shared_story.story_title, + comments=shared_story.comments, + ) + + if format == "html": comment = MSharedStory.attach_users_to_comment(comment, profiles) - return render(request, 'social/story_comment.xhtml', { - 'comment': comment, - }) + return render( + request, + "social/story_comment.xhtml", + { + "comment": comment, + }, + ) else: - return json.json_response(request, { - 'code': code, - 'comment': comment, - 'user_profiles': profiles, - }) - + return 
json.json_response( + request, + { + "code": code, + "comment": comment, + "user_profiles": profiles, + }, + ) + + @ajax_login_required def remove_like_comment(request): - code = 1 - feed_id = int(request.POST['story_feed_id']) - story_id = request.POST['story_id'] - comment_user_id = request.POST['comment_user_id'] - format = request.POST.get('format', 'json') - - shared_story = MSharedStory.objects.get(user_id=comment_user_id, - story_feed_id=feed_id, - story_guid=story_id) + code = 1 + feed_id = int(request.POST["story_feed_id"]) + story_id = request.POST["story_id"] + comment_user_id = request.POST["comment_user_id"] + format = request.POST.get("format", "json") + + shared_story = MSharedStory.objects.get( + user_id=comment_user_id, story_feed_id=feed_id, story_guid=story_id + ) shared_story.remove_liking_user(request.user.pk) comment, profiles = shared_story.comment_with_author_and_profiles() comment_user = User.objects.get(pk=shared_story.user_id) - logging.user(request, "~BB~FMRemoving like on comment by ~SB%s~SN: %s" % ( - comment_user.username, - shared_story.comments[:30], - )) - - if format == 'html': + logging.user( + request, + "~BB~FMRemoving like on comment by ~SB%s~SN: %s" + % ( + comment_user.username, + shared_story.comments[:30], + ), + ) + + if format == "html": comment = MSharedStory.attach_users_to_comment(comment, profiles) - return render(request, 'social/story_comment.xhtml', { - 'comment': comment, - }) + return render( + request, + "social/story_comment.xhtml", + { + "comment": comment, + }, + ) else: - return json.json_response(request, { - 'code': code, - 'comment': comment, - 'user_profiles': profiles, - }) + return json.json_response( + request, + { + "code": code, + "comment": comment, + "user_profiles": profiles, + }, + ) + + def get_subdomain(request): - host = request.META.get('HTTP_HOST') + host = request.META.get("HTTP_HOST") if host.count(".") == 2: return host.split(".")[0] else: return None + def shared_stories_rss_feed_noid(request): - index = HttpResponseRedirect('http://%s%s' % ( - Site.objects.get_current().domain, - reverse('index'))) + index = HttpResponseRedirect("http://%s%s" % (Site.objects.get_current().domain, reverse("index"))) if get_subdomain(request): username = get_subdomain(request) try: - if '.' in username: - username = username.split('.')[0] + if "." 
in username: + username = username.split(".")[0] user = User.objects.get(username__iexact=username) except User.DoesNotExist: return index @@ -1349,6 +1574,7 @@ def shared_stories_rss_feed_noid(request): return index + @ratelimit(minutes=1, requests=5) def shared_stories_rss_feed(request, user_id, username=None): try: @@ -1357,81 +1583,89 @@ def shared_stories_rss_feed(request, user_id, username=None): raise Http404 limit = 25 - offset = request.GET.get('page', 0) * limit + offset = request.GET.get("page", 0) * limit username = username and username.lower() profile = MSocialProfile.get_user(user.pk) - params = {'username': profile.username_slug, 'user_id': user.pk} + params = {"username": profile.username_slug, "user_id": user.pk} if not username or profile.username_slug.lower() != username: - return HttpResponseRedirect(reverse('shared-stories-rss-feed', kwargs=params)) + return HttpResponseRedirect(reverse("shared-stories-rss-feed", kwargs=params)) social_profile = MSocialProfile.get_user(user_id) current_site = Site.objects.get_current() current_site = current_site and current_site.domain - + if social_profile.private: return HttpResponseForbidden() - + data = {} - data['title'] = social_profile.title - data['link'] = social_profile.blurblog_url - data['description'] = "Stories shared by %s on NewsBlur." % user.username - data['lastBuildDate'] = datetime.datetime.utcnow() - data['generator'] = 'NewsBlur - %s' % settings.NEWSBLUR_URL - data['docs'] = None - data['author_name'] = user.username - data['feed_url'] = "http://%s%s" % ( + data["title"] = social_profile.title + data["link"] = social_profile.blurblog_url + data["description"] = "Stories shared by %s on NewsBlur." % user.username + data["lastBuildDate"] = datetime.datetime.utcnow() + data["generator"] = "NewsBlur - %s" % settings.NEWSBLUR_URL + data["docs"] = None + data["author_name"] = user.username + data["feed_url"] = "http://%s%s" % ( current_site, - reverse('shared-stories-rss-feed', kwargs=params), + reverse("shared-stories-rss-feed", kwargs=params), ) rss = feedgenerator.Atom1Feed(**data) - shared_stories = MSharedStory.objects.filter(user_id=user.pk).order_by('-shared_date')[offset:offset+limit] + shared_stories = MSharedStory.objects.filter(user_id=user.pk).order_by("-shared_date")[ + offset : offset + limit + ] for shared_story in shared_stories: feed = Feed.get_by_id(shared_story.story_feed_id) - content = render_to_string('social/rss_story.xhtml', { - 'feed': feed, - 'user': user, - 'social_profile': social_profile, - 'shared_story': shared_story, - 'content': shared_story.story_content_str, - }) + content = render_to_string( + "social/rss_story.xhtml", + { + "feed": feed, + "user": user, + "social_profile": social_profile, + "shared_story": shared_story, + "content": shared_story.story_content_str, + }, + ) story_data = { - 'title': shared_story.story_title, - 'link': shared_story.story_permalink, - 'description': content, - 'author_name': shared_story.story_author_name, - 'categories': shared_story.story_tags, - 'unique_id': shared_story.story_permalink, - 'pubdate': shared_story.shared_date, + "title": shared_story.story_title, + "link": shared_story.story_permalink, + "description": content, + "author_name": shared_story.story_author_name, + "categories": shared_story.story_tags, + "unique_id": shared_story.story_permalink, + "pubdate": shared_story.shared_date, } rss.add_item(**story_data) - - logging.user(request, "~FBGenerating ~SB%s~SN's RSS feed: ~FM%s" % ( - user.username, - 
request.META.get('HTTP_USER_AGENT', "")[:24] - )) - return HttpResponse(rss.writeString('utf-8'), content_type='application/rss+xml') - -@required_params('user_id', method="GET") + + logging.user( + request, + "~FBGenerating ~SB%s~SN's RSS feed: ~FM%s" + % (user.username, request.META.get("HTTP_USER_AGENT", "")[:24]), + ) + return HttpResponse(rss.writeString("utf-8"), content_type="application/rss+xml") + + +@required_params("user_id", method="GET") @json.json_view def social_feed_trainer(request): - social_user_id = request.GET['user_id'] + social_user_id = request.GET["user_id"] social_profile = MSocialProfile.get_user(social_user_id) social_user = get_object_or_404(User, pk=social_user_id) user = get_user(request) - + social_profile.count_stories() classifier = social_profile.canonical() - classifier['classifiers'] = get_classifiers_for_user(user, social_user_id=classifier['id']) - classifier['num_subscribers'] = social_profile.follower_count - classifier['feed_tags'] = [] - classifier['feed_authors'] = [] - - logging.user(user, "~FGLoading social trainer on ~SB%s: %s" % ( - social_user.username, social_profile.title)) - + classifier["classifiers"] = get_classifiers_for_user(user, social_user_id=classifier["id"]) + classifier["num_subscribers"] = social_profile.follower_count + classifier["feed_tags"] = [] + classifier["feed_authors"] = [] + + logging.user( + user, "~FGLoading social trainer on ~SB%s: %s" % (social_user.username, social_profile.title) + ) + return [classifier] - + @json.json_view def load_social_statistics(request, social_user_id, username=None): @@ -1439,96 +1673,101 @@ def load_social_statistics(request, social_user_id, username=None): social_profile = MSocialProfile.get_user(social_user_id) social_profile.save_feed_story_history_statistics() social_profile.save_classifier_counts() - + # Stories per month - average and month-by-month breakout - stats['average_stories_per_month'] = social_profile.average_stories_per_month - stats['story_count_history'] = social_profile.story_count_history - stats['story_hours_history'] = social_profile.story_hours_history - stats['story_days_history'] = social_profile.story_days_history - + stats["average_stories_per_month"] = social_profile.average_stories_per_month + stats["story_count_history"] = social_profile.story_count_history + stats["story_hours_history"] = social_profile.story_hours_history + stats["story_days_history"] = social_profile.story_days_history + # Subscribers - stats['subscriber_count'] = social_profile.follower_count - stats['num_subscribers'] = social_profile.follower_count - + stats["subscriber_count"] = social_profile.follower_count + stats["num_subscribers"] = social_profile.follower_count + # Classifier counts - stats['classifier_counts'] = social_profile.feed_classifier_counts - + stats["classifier_counts"] = social_profile.feed_classifier_counts + # Feeds - feed_ids = [c['feed_id'] for c in stats['classifier_counts'].get('feed', [])] - feeds = Feed.objects.filter(pk__in=feed_ids).only('feed_title') + feed_ids = [c["feed_id"] for c in stats["classifier_counts"].get("feed", [])] + feeds = Feed.objects.filter(pk__in=feed_ids).only("feed_title") titles = dict([(f.pk, f.feed_title) for f in feeds]) - for stat in stats['classifier_counts'].get('feed', []): - stat['feed_title'] = titles.get(stat['feed_id'], "") - - logging.user(request, "~FBStatistics social: ~SB%s ~FG(%s subs)" % ( - social_profile.user_id, social_profile.follower_count)) + for stat in stats["classifier_counts"].get("feed", []): + 
stat["feed_title"] = titles.get(stat["feed_id"], "") + + logging.user( + request, + "~FBStatistics social: ~SB%s ~FG(%s subs)" % (social_profile.user_id, social_profile.follower_count), + ) return stats + @json.json_view def load_social_settings(request, social_user_id, username=None): social_profile = MSocialProfile.get_user(social_user_id) - + return social_profile.canonical() + @ajax_login_required def load_interactions(request): - user_id = request.GET.get('user_id', None) - categories = request.GET.getlist('category') or request.GET.getlist('category[]') - if not user_id or 'null' in user_id: + user_id = request.GET.get("user_id", None) + categories = request.GET.getlist("category") or request.GET.getlist("category[]") + if not user_id or "null" in user_id: user_id = get_user(request).pk - page = max(1, int(request.GET.get('page', 1))) - limit = request.GET.get('limit') - interactions, has_next_page = MInteraction.user(user_id, page=page, limit=limit, - categories=categories) - format = request.GET.get('format', None) - - data = { - 'interactions': interactions, - 'page': page, - 'has_next_page': has_next_page - } - + page = max(1, int(request.GET.get("page", 1))) + limit = request.GET.get("limit") + interactions, has_next_page = MInteraction.user(user_id, page=page, limit=limit, categories=categories) + format = request.GET.get("format", None) + + data = {"interactions": interactions, "page": page, "has_next_page": has_next_page} + logging.user(request, "~FBLoading interactions ~SBp/%s" % page) - - if format == 'html': - return render(request, 'reader/interactions_module.xhtml', data) + + if format == "html": + return render(request, "reader/interactions_module.xhtml", data) else: return json.json_response(request, data) + @ajax_login_required def load_activities(request): - user_id = request.GET.get('user_id', None) - categories = request.GET.getlist('category') or request.GET.getlist('category[]') - if user_id and 'null' not in user_id: + user_id = request.GET.get("user_id", None) + categories = request.GET.getlist("category") or request.GET.getlist("category[]") + if user_id and "null" not in user_id: user_id = int(user_id) user = User.objects.get(pk=user_id) else: user = get_user(request) user_id = user.pk - + public = user_id != request.user.pk - page = max(1, int(request.GET.get('page', 1))) - limit = request.GET.get('limit', 4) - activities, has_next_page = MActivity.user(user_id, page=page, limit=limit, public=public, - categories=categories) - format = request.GET.get('format', None) - + page = max(1, int(request.GET.get("page", 1))) + limit = request.GET.get("limit", 4) + activities, has_next_page = MActivity.user( + user_id, page=page, limit=limit, public=public, categories=categories + ) + format = request.GET.get("format", None) + data = { - 'activities': activities, - 'page': page, - 'has_next_page': has_next_page, - 'username': (user.username if public else 'You'), + "activities": activities, + "page": page, + "has_next_page": has_next_page, + "username": (user.username if public else "You"), } - + logging.user(request, "~FBLoading activities ~SBp/%s" % page) - - if format == 'html': - return render(request, 'reader/activities_module.xhtml', data, - ) + + if format == "html": + return render( + request, + "reader/activities_module.xhtml", + data, + ) else: return json.json_response(request, data) + @json.json_view def comment(request, comment_id): try: @@ -1537,13 +1776,14 @@ def comment(request, comment_id): raise Http404 return shared_story.comments_with_author() + 
@json.json_view def comment_reply(request, comment_id, reply_id): try: shared_story = MSharedStory.objects.get(id=comment_id) except MSharedStory.DoesNotExist: raise Http404 - + for story_reply in shared_story.replies: if story_reply.reply_id == ObjectId(reply_id): return story_reply diff --git a/apps/static/tests.py b/apps/static/tests.py index 2247054b35..3748f41ba4 100644 --- a/apps/static/tests.py +++ b/apps/static/tests.py @@ -7,6 +7,7 @@ from django.test import TestCase + class SimpleTest(TestCase): def test_basic_addition(self): """ @@ -14,10 +15,12 @@ def test_basic_addition(self): """ self.failUnlessEqual(1 + 1, 2) -__test__ = {"doctest": """ + +__test__ = { + "doctest": """ Another way to test that 1 + 1 is equal to 2. >>> 1 + 1 == 2 True -"""} - +""" +} diff --git a/apps/static/views.py b/apps/static/views.py index c5c98f05cc..934a38ef93 100644 --- a/apps/static/views.py +++ b/apps/static/views.py @@ -8,102 +8,123 @@ from apps.search.models import SearchFeed from utils import log as logging + def about(request): - return render(request, 'static/about.xhtml') - + return render(request, "static/about.xhtml") + + def faq(request): - return render(request, 'static/faq.xhtml') - + return render(request, "static/faq.xhtml") + + def api(request): - filename = settings.TEMPLATES[0]['DIRS'][0] + '/static/api.yml' + filename = settings.TEMPLATES[0]["DIRS"][0] + "/static/api.yml" api_yml_file = open(filename).read() - data = yaml.load(api_yml_file) + data = yaml.load(api_yml_file) + + return render(request, "static/api.xhtml", {"data": data}) + - return render(request, 'static/api.xhtml', {'data': data}) - def press(request): - return render(request, 'static/press.xhtml') + return render(request, "static/press.xhtml") + def privacy(request): - return render(request, 'static/privacy.xhtml') + return render(request, "static/privacy.xhtml") + def tos(request): - return render(request, 'static/tos.xhtml') + return render(request, "static/tos.xhtml") + def webmanifest(request): - filename = settings.MEDIA_ROOT + '/extensions/edge/manifest.json' + filename = settings.MEDIA_ROOT + "/extensions/edge/manifest.json" manifest = open(filename).read() - - return HttpResponse(manifest, content_type='application/manifest+json') + + return HttpResponse(manifest, content_type="application/manifest+json") + def apple_app_site_assoc(request): - return render(request, 'static/apple_app_site_assoc.xhtml') - + return render(request, "static/apple_app_site_assoc.xhtml") + + def apple_developer_merchantid(request): - return render(request, 'static/apple_developer_merchantid.xhtml') + return render(request, "static/apple_developer_merchantid.xhtml") + def feedback(request): - return render(request, 'static/feedback.xhtml') + return render(request, "static/feedback.xhtml") + def firefox(request): - filename = settings.MEDIA_ROOT + '/extensions/firefox/manifest.json' + filename = settings.MEDIA_ROOT + "/extensions/firefox/manifest.json" manifest = open(filename).read() - - return HttpResponse(manifest, content_type='application/x-web-app-manifest+json') + + return HttpResponse(manifest, content_type="application/x-web-app-manifest+json") + def ios(request): - return render(request, 'static/ios.xhtml') - + return render(request, "static/ios.xhtml") + + def android(request): - return render(request, 'static/android.xhtml') - + return render(request, "static/android.xhtml") + + def ios_download(request): - return render(request, 'static/ios_download.xhtml') - + return render(request, "static/ios_download.xhtml") + + 
def ios_plist(request): - filename = os.path.join(settings.NEWSBLUR_DIR, 'clients/ios/NewsBlur.plist') + filename = os.path.join(settings.NEWSBLUR_DIR, "clients/ios/NewsBlur.plist") manifest = open(filename).read() - + logging.user(request, "~SK~FR~BBDownloading NewsBlur.plist...") - return HttpResponse(manifest, content_type='text/xml') - + return HttpResponse(manifest, content_type="text/xml") + + def ios_ipa(request): - filename = os.path.join(settings.NEWSBLUR_DIR, 'clients/ios/NewsBlur.ipa') + filename = os.path.join(settings.NEWSBLUR_DIR, "clients/ios/NewsBlur.ipa") manifest = open(filename).read() - + logging.user(request, "~SK~FR~BBDownloading NewsBlur.ipa...") - return HttpResponse(manifest, content_type='application/octet-stream') + return HttpResponse(manifest, content_type="application/octet-stream") + def haproxy_check(request): return HttpResponse("OK") + def postgres_check(request): - feed = Feed.objects.latest('pk').pk + feed = Feed.objects.latest("pk").pk if feed: return HttpResponse(unicode(feed)) assert False, "Cannot read from postgres database" + def mongo_check(request): stories = MStory.objects.count() if stories: return HttpResponse(unicode(stories)) assert False, "Cannot read from mongo database" + def elasticsearch_check(request): client = SearchFeed.ES() if client.indices.exists_index(SearchFeed.index_name()): return HttpResponse(SearchFeed.index_name()) assert False, "Cannot read from elasticsearch database" + def redis_check(request): - pool = request.GET['pool'] - if pool == 'main': + pool = request.GET["pool"] + if pool == "main": r = redis.Redis(connection_pool=settings.REDIS_POOL) - elif pool == 'story': + elif pool == "story": r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) - elif pool == 'sessions': + elif pool == "sessions": r = redis.Redis(connection_pool=settings.REDIS_SESSION_POOL) - + key = r.randomkey() if key: return HttpResponse(unicode(key)) diff --git a/apps/statistics/management/commands/collect_feedback.py b/apps/statistics/management/commands/collect_feedback.py index 38f0e93785..64ffa8d51a 100644 --- a/apps/statistics/management/commands/collect_feedback.py +++ b/apps/statistics/management/commands/collect_feedback.py @@ -1,7 +1,7 @@ from django.core.management.base import BaseCommand from apps.statistics.models import MFeedback -class Command(BaseCommand): +class Command(BaseCommand): def handle(self, *args, **options): - MFeedback.collect_feedback() \ No newline at end of file + MFeedback.collect_feedback() diff --git a/apps/statistics/management/commands/collect_stats.py b/apps/statistics/management/commands/collect_stats.py index eea39564e6..d664faf699 100644 --- a/apps/statistics/management/commands/collect_stats.py +++ b/apps/statistics/management/commands/collect_stats.py @@ -1,8 +1,7 @@ from django.core.management.base import BaseCommand from apps.statistics.models import MStatistics -class Command(BaseCommand): +class Command(BaseCommand): def handle(self, *args, **options): MStatistics.collect_statistics() - \ No newline at end of file diff --git a/apps/statistics/models.py b/apps/statistics/models.py index 9d4e5fa5e6..9d3df0934d 100644 --- a/apps/statistics/models.py +++ b/apps/statistics/models.py @@ -13,20 +13,21 @@ from utils import db_functions from utils import log as logging + class MStatistics(mongo.Document): - key = mongo.StringField(unique=True) + key = mongo.StringField(unique=True) value = mongo.DynamicField() expiration_date = mongo.DateTimeField() - + meta = { - 'collection': 'statistics', - 
'allow_inheritance': False, - 'indexes': ['key'], + "collection": "statistics", + "allow_inheritance": False, + "indexes": ["key"], } - + def __str__(self): return "%s: %s" % (self.key, self.value) - + @classmethod def get(cls, key, default=None, set_default=False, expiration_sec=None): obj = cls.objects.filter(key=key).first() @@ -53,25 +54,31 @@ def set(cls, key, value, expiration_sec=None): if expiration_sec: obj.expiration_date = datetime.datetime.now() + datetime.timedelta(seconds=expiration_sec) obj.save() - + @classmethod def all(cls): stats = cls.objects.all() values = dict([(stat.key, stat.value) for stat in stats]) for key, value in list(values.items()): - if key in ('avg_time_taken', 'sites_loaded', 'stories_shared'): + if key in ("avg_time_taken", "sites_loaded", "stories_shared"): values[key] = json.decode(value) - elif key in ('feeds_fetched', 'premium_users', 'standard_users', 'latest_sites_loaded', - 'max_sites_loaded', 'max_stories_shared'): + elif key in ( + "feeds_fetched", + "premium_users", + "standard_users", + "latest_sites_loaded", + "max_sites_loaded", + "max_stories_shared", + ): values[key] = int(value) - elif key in ('latest_avg_time_taken', 'max_avg_time_taken', 'last_1_min_time_taken'): + elif key in ("latest_avg_time_taken", "max_avg_time_taken", "last_1_min_time_taken"): values[key] = float(value) - - values['total_sites_loaded'] = sum(values['sites_loaded']) if 'sites_loaded' in values else 0 - values['total_stories_shared'] = sum(values['stories_shared']) if 'stories_shared' in values else 0 + + values["total_sites_loaded"] = sum(values["sites_loaded"]) if "sites_loaded" in values else 0 + values["total_stories_shared"] = sum(values["stories_shared"]) if "stories_shared" in values else 0 return values - + @classmethod def collect_statistics(cls): now = datetime.datetime.now() @@ -93,34 +100,38 @@ def collect_statistics(cls): cls.collect_statistics_feeds_fetched() # if settings.DEBUG: # print("Feeds Fetched: %s" % (datetime.datetime.now() - now)) - + @classmethod def collect_statistics_feeds_fetched(cls): - feeds_fetched = RStats.count('feed_fetch', hours=24) - cls.objects(key='feeds_fetched').update_one(upsert=True, - set__key='feeds_fetched', - set__value=feeds_fetched) - + feeds_fetched = RStats.count("feed_fetch", hours=24) + cls.objects(key="feeds_fetched").update_one( + upsert=True, set__key="feeds_fetched", set__value=feeds_fetched + ) + return feeds_fetched - + @classmethod def collect_statistics_premium_users(cls): last_day = datetime.datetime.now() - datetime.timedelta(hours=24) - + premium_users = Profile.objects.filter(last_seen_on__gte=last_day, is_premium=True).count() - cls.objects(key='premium_users').update_one(upsert=True, set__key='premium_users', set__value=premium_users) - + cls.objects(key="premium_users").update_one( + upsert=True, set__key="premium_users", set__value=premium_users + ) + return premium_users - + @classmethod def collect_statistics_standard_users(cls): last_day = datetime.datetime.now() - datetime.timedelta(hours=24) - + standard_users = Profile.objects.filter(last_seen_on__gte=last_day, is_premium=False).count() - cls.objects(key='standard_users').update_one(upsert=True, set__key='standard_users', set__value=standard_users) - + cls.objects(key="standard_users").update_one( + upsert=True, set__key="standard_users", set__value=standard_users + ) + return standard_users - + @classmethod def collect_statistics_sites_loaded(cls): now = round_time(datetime.datetime.now(), round_to=60) @@ -130,23 +141,23 @@ def 
collect_statistics_sites_loaded(cls): r = redis.Redis(connection_pool=settings.REDIS_STATISTICS_POOL) for hours_ago in range(24): - start_hours_ago = now - datetime.timedelta(hours=hours_ago+1) - + start_hours_ago = now - datetime.timedelta(hours=hours_ago + 1) + pipe = r.pipeline() for m in range(60): minute = start_hours_ago + datetime.timedelta(minutes=m) - key = "%s:%s" % (RStats.stats_type('page_load'), minute.strftime('%s')) + key = "%s:%s" % (RStats.stats_type("page_load"), minute.strftime("%s")) pipe.get("%s:s" % key) pipe.get("%s:a" % key) - + times = pipe.execute() - + counts = [int(c) for c in times[::2] if c] avgs = [float(a) for a in times[1::2] if a] - + if hours_ago == 0: last_1_min_time_taken = round(sum(avgs[:1]) / max(1, sum(counts[:1])), 2) - + if counts and avgs: count = max(1, sum(counts)) avg = round(sum(avgs) / count, 3) @@ -161,81 +172,81 @@ def collect_statistics_sites_loaded(cls): avg_time_taken.reverse() values = ( - ('sites_loaded', json.encode(sites_loaded)), - ('avg_time_taken', json.encode(avg_time_taken)), - ('latest_sites_loaded', sites_loaded[-1]), - ('latest_avg_time_taken', avg_time_taken[-1]), - ('max_sites_loaded', max(sites_loaded)), - ('max_avg_time_taken', max(1, max(avg_time_taken))), - ('last_1_min_time_taken', last_1_min_time_taken), + ("sites_loaded", json.encode(sites_loaded)), + ("avg_time_taken", json.encode(avg_time_taken)), + ("latest_sites_loaded", sites_loaded[-1]), + ("latest_avg_time_taken", avg_time_taken[-1]), + ("max_sites_loaded", max(sites_loaded)), + ("max_avg_time_taken", max(1, max(avg_time_taken))), + ("last_1_min_time_taken", last_1_min_time_taken), ) for key, value in values: cls.objects(key=key).update_one(upsert=True, set__key=key, set__value=value) - + @classmethod def collect_statistics_stories_shared(cls): now = datetime.datetime.now() stories_shared = [] - + for hour in range(24): start_hours_ago = now - datetime.timedelta(hours=hour) - end_hours_ago = now - datetime.timedelta(hours=hour+1) + end_hours_ago = now - datetime.timedelta(hours=hour + 1) shares = MSharedStory.objects.filter( - shared_date__lte=start_hours_ago, - shared_date__gte=end_hours_ago + shared_date__lte=start_hours_ago, shared_date__gte=end_hours_ago ).count() stories_shared.append(shares) stories_shared.reverse() - + values = ( - ('stories_shared', json.encode(stories_shared)), - ('latest_stories_shared', stories_shared[-1]), - ('max_stories_shared', max(stories_shared)), + ("stories_shared", json.encode(stories_shared)), + ("latest_stories_shared", stories_shared[-1]), + ("max_stories_shared", max(stories_shared)), ) for key, value in values: cls.objects(key=key).update_one(upsert=True, set__key=key, set__value=value) - + @classmethod def collect_statistics_for_db(cls, debug=False): lag = db_functions.mongo_max_replication_lag(settings.MONGODB) - cls.set('mongodb_replication_lag', lag) - + cls.set("mongodb_replication_lag", lag) + now = round_time(datetime.datetime.now(), round_to=60) r = redis.Redis(connection_pool=settings.REDIS_STATISTICS_POOL) db_times = {} latest_db_times = {} - for db in ['sql', - 'mongo', - 'redis', - 'redis_user', - 'redis_story', - 'redis_session', - 'redis_pubsub', - 'task_sql', - 'task_mongo', - 'task_redis', - 'task_redis_user', - 'task_redis_story', - 'task_redis_session', - 'task_redis_pubsub', - ]: + for db in [ + "sql", + "mongo", + "redis", + "redis_user", + "redis_story", + "redis_session", + "redis_pubsub", + "task_sql", + "task_mongo", + "task_redis", + "task_redis_user", + "task_redis_story", + 
"task_redis_session", + "task_redis_pubsub", + ]: db_times[db] = [] for hour in range(24): - start_hours_ago = now - datetime.timedelta(hours=hour+1) + start_hours_ago = now - datetime.timedelta(hours=hour + 1) pipe = r.pipeline() for m in range(60): minute = start_hours_ago + datetime.timedelta(minutes=m) - key = "DB:%s:%s" % (db, minute.strftime('%s')) + key = "DB:%s:%s" % (db, minute.strftime("%s")) if debug: print(" -> %s:c" % key) pipe.get("%s:c" % key) pipe.get("%s:t" % key) - + times = pipe.execute() - + counts = [int(c or 0) for c in times[::2]] avgs = [float(a or 0) for a in times[1::2]] if counts and avgs: @@ -244,7 +255,7 @@ def collect_statistics_for_db(cls, debug=False): else: count = 0 avg = 0 - + if hour == 0: latest_count = float(counts[-1]) if len(counts) else 0 latest_avg = float(avgs[-1]) if len(avgs) else 0 @@ -254,85 +265,91 @@ def collect_statistics_for_db(cls, debug=False): db_times[db].reverse() values = ( - ('avg_sql_times', json.encode(db_times['sql'])), - ('avg_mongo_times', json.encode(db_times['mongo'])), - ('avg_redis_times', json.encode(db_times['redis'])), - ('latest_sql_avg', latest_db_times['sql']), - ('latest_mongo_avg', latest_db_times['mongo']), - ('latest_redis_user_avg', latest_db_times['redis_user']), - ('latest_redis_story_avg', latest_db_times['redis_story']), - ('latest_redis_session_avg',latest_db_times['redis_session']), - ('latest_redis_pubsub_avg', latest_db_times['redis_pubsub']), - ('latest_task_sql_avg', latest_db_times['task_sql']), - ('latest_task_mongo_avg', latest_db_times['task_mongo']), - ('latest_task_redis_user_avg', latest_db_times['task_redis_user']), - ('latest_task_redis_story_avg', latest_db_times['task_redis_story']), - ('latest_task_redis_session_avg',latest_db_times['task_redis_session']), - ('latest_task_redis_pubsub_avg', latest_db_times['task_redis_pubsub']), + ("avg_sql_times", json.encode(db_times["sql"])), + ("avg_mongo_times", json.encode(db_times["mongo"])), + ("avg_redis_times", json.encode(db_times["redis"])), + ("latest_sql_avg", latest_db_times["sql"]), + ("latest_mongo_avg", latest_db_times["mongo"]), + ("latest_redis_user_avg", latest_db_times["redis_user"]), + ("latest_redis_story_avg", latest_db_times["redis_story"]), + ("latest_redis_session_avg", latest_db_times["redis_session"]), + ("latest_redis_pubsub_avg", latest_db_times["redis_pubsub"]), + ("latest_task_sql_avg", latest_db_times["task_sql"]), + ("latest_task_mongo_avg", latest_db_times["task_mongo"]), + ("latest_task_redis_user_avg", latest_db_times["task_redis_user"]), + ("latest_task_redis_story_avg", latest_db_times["task_redis_story"]), + ("latest_task_redis_session_avg", latest_db_times["task_redis_session"]), + ("latest_task_redis_pubsub_avg", latest_db_times["task_redis_pubsub"]), ) for key, value in values: cls.objects(key=key).update_one(upsert=True, set__key=key, set__value=value) class MFeedback(mongo.Document): - date = mongo.DateTimeField() + date = mongo.DateTimeField() date_short = mongo.StringField() subject = mongo.StringField() - url = mongo.StringField() - style = mongo.StringField() - order = mongo.IntField() - + url = mongo.StringField() + style = mongo.StringField() + order = mongo.IntField() + meta = { - 'collection': 'feedback', - 'allow_inheritance': False, - 'indexes': ['style'], - 'ordering': ['order'], + "collection": "feedback", + "allow_inheritance": False, + "indexes": ["style"], + "ordering": ["order"], } - + CATEGORIES = { - 5: 'idea', - 6: 'problem', - 7: 'praise', - 8: 'question', - 9: 'admin', - 10: 'updates', + 5: 
"idea", + 6: "problem", + 7: "praise", + 8: "question", + 9: "admin", + 10: "updates", } - + def __str__(self): return "%s: (%s) %s" % (self.style, self.date, self.subject) - + @classmethod def collect_feedback(cls): seen_posts = set() try: - data = requests.get('https://forum.newsblur.com/posts.json', timeout=3).content + data = requests.get("https://forum.newsblur.com/posts.json", timeout=3).content except (urllib.error.HTTPError, requests.exceptions.ConnectTimeout) as e: logging.debug(" ***> Failed to collect feedback: %s" % e) return - data = json.decode(data).get('latest_posts', "") + data = json.decode(data).get("latest_posts", "") if not len(data): print("No data!") return - + cls.objects.delete() post_count = 0 for post in data: - if post['topic_id'] in seen_posts: continue - seen_posts.add(post['topic_id']) + if post["topic_id"] in seen_posts: + continue + seen_posts.add(post["topic_id"]) feedback = {} - feedback['order'] = post_count + feedback["order"] = post_count post_count += 1 - feedback['date'] = dateutil.parser.parse(post['created_at']).replace(tzinfo=None) - feedback['date_short'] = relative_date(feedback['date']) - feedback['subject'] = post['topic_title'] - feedback['url'] = "https://forum.newsblur.com/t/%s/%s/%s" % (post['topic_slug'], post['topic_id'], post['post_number']) - feedback['style'] = cls.CATEGORIES[post['category_id']] + feedback["date"] = dateutil.parser.parse(post["created_at"]).replace(tzinfo=None) + feedback["date_short"] = relative_date(feedback["date"]) + feedback["subject"] = post["topic_title"] + feedback["url"] = "https://forum.newsblur.com/t/%s/%s/%s" % ( + post["topic_slug"], + post["topic_id"], + post["post_number"], + ) + feedback["style"] = cls.CATEGORIES[post["category_id"]] cls.objects.create(**feedback) # if settings.DEBUG: # print("%s: %s (%s)" % (feedback['style'], feedback['subject'], feedback['date_short'])) - if post_count >= 4: break - + if post_count >= 4: + break + @classmethod def all(cls): feedbacks = cls.objects.all()[:4] @@ -350,28 +367,31 @@ class MAnalyticsFetcher(mongo.Document): total = mongo.FloatField() server = mongo.StringField() feed_code = mongo.IntField() - + meta = { - 'db_alias': 'nbanalytics', - 'collection': 'feed_fetches', - 'allow_inheritance': False, - 'indexes': ['date', 'feed_id', 'server', 'feed_code'], - 'ordering': ['date'], + "db_alias": "nbanalytics", + "collection": "feed_fetches", + "allow_inheritance": False, + "indexes": ["date", "feed_id", "server", "feed_code"], + "ordering": ["date"], } - + def __str__(self): - return "%s: %.4s+%.4s+%.4s+%.4s = %.4ss" % (self.feed_id, self.feed_fetch, - self.feed_process, - self.page, - self.icon, - self.total) - + return "%s: %.4s+%.4s+%.4s+%.4s = %.4ss" % ( + self.feed_id, + self.feed_fetch, + self.feed_process, + self.page, + self.icon, + self.total, + ) + @classmethod - def add(cls, feed_id, feed_fetch, feed_process, - page, icon, total, feed_code): + def add(cls, feed_id, feed_fetch, feed_process, page, icon, total, feed_code): server_name = settings.SERVER_NAME - if 'app' in server_name: return - + if "app" in server_name: + return + if icon and page: icon -= page if page and feed_process: @@ -380,12 +400,18 @@ def add(cls, feed_id, feed_fetch, feed_process, page -= feed_fetch if feed_process and feed_fetch: feed_process -= feed_fetch - - cls.objects.create(feed_id=feed_id, feed_fetch=feed_fetch, - feed_process=feed_process, - page=page, icon=icon, total=total, - server=server_name, feed_code=feed_code) - + + cls.objects.create( + feed_id=feed_id, + 
feed_fetch=feed_fetch, + feed_process=feed_process, + page=page, + icon=icon, + total=total, + server=server_name, + feed_code=feed_code, + ) + @classmethod def calculate_stats(cls, stats): return cls.aggregate(**stats) @@ -395,24 +421,24 @@ class MAnalyticsLoader(mongo.Document): date = mongo.DateTimeField(default=datetime.datetime.now) page_load = mongo.FloatField() server = mongo.StringField() - + meta = { - 'db_alias': 'nbanalytics', - 'collection': 'page_loads', - 'allow_inheritance': False, - 'indexes': ['date', 'server'], - 'ordering': ['date'], + "db_alias": "nbanalytics", + "collection": "page_loads", + "allow_inheritance": False, + "indexes": ["date", "server"], + "ordering": ["date"], } - + def __str__(self): return "%s: %.4ss" % (self.server, self.page_load) - + @classmethod def add(cls, page_load): server_name = settings.SERVER_NAME cls.objects.create(page_load=page_load, server=server_name) - + @classmethod def calculate_stats(cls, stats): return cls.aggregate(**stats) diff --git a/apps/statistics/rstats.py b/apps/statistics/rstats.py index e25a61f167..3b8785d759 100644 --- a/apps/statistics/rstats.py +++ b/apps/statistics/rstats.py @@ -6,87 +6,86 @@ class RStats: - STATS_TYPE = { - 'page_load': 'PLT', - 'feed_fetch': 'FFH', + "page_load": "PLT", + "feed_fetch": "FFH", } - + @classmethod def stats_type(cls, name): return cls.STATS_TYPE[name] - + @classmethod def add(cls, name, duration=None): r = redis.Redis(connection_pool=settings.REDIS_STATISTICS_POOL) pipe = r.pipeline() minute = round_time(round_to=60) - key = "%s:%s" % (cls.stats_type(name), minute.strftime('%s')) + key = "%s:%s" % (cls.stats_type(name), minute.strftime("%s")) pipe.incr("%s:s" % key) if duration: pipe.incrbyfloat("%s:a" % key, duration) pipe.expireat("%s:a" % key, (minute + datetime.timedelta(days=2)).strftime("%s")) pipe.expireat("%s:s" % key, (minute + datetime.timedelta(days=2)).strftime("%s")) pipe.execute() - + @classmethod def clean_path(cls, path): if not path: return - - if path.startswith('/reader/feed/'): - path = '/reader/feed/' - elif path.startswith('/social/stories'): - path = '/social/stories/' - elif path.startswith('/reader/river_stories'): - path = '/reader/river_stories/' - elif path.startswith('/social/river_stories'): - path = '/social/river_stories/' - elif path.startswith('/reader/page/'): - path = '/reader/page/' - elif path.startswith('/api/check_share_on_site'): - path = '/api/check_share_on_site/' - + + if path.startswith("/reader/feed/"): + path = "/reader/feed/" + elif path.startswith("/social/stories"): + path = "/social/stories/" + elif path.startswith("/reader/river_stories"): + path = "/reader/river_stories/" + elif path.startswith("/social/river_stories"): + path = "/social/river_stories/" + elif path.startswith("/reader/page/"): + path = "/reader/page/" + elif path.startswith("/api/check_share_on_site"): + path = "/api/check_share_on_site/" + return path - + @classmethod def count(cls, name, hours=24): r = redis.Redis(connection_pool=settings.REDIS_STATISTICS_POOL) stats_type = cls.stats_type(name) now = datetime.datetime.now() pipe = r.pipeline() - for minutes_ago in range(60*hours): + for minutes_ago in range(60 * hours): dt_min_ago = now - datetime.timedelta(minutes=minutes_ago) minute = round_time(dt=dt_min_ago, round_to=60) - key = "%s:%s" % (stats_type, minute.strftime('%s')) + key = "%s:%s" % (stats_type, minute.strftime("%s")) pipe.get("%s:s" % key) values = pipe.execute() total = sum(int(v) for v in values if v) return total - + @classmethod def sample(cls, 
sample=1000, pool=None):
         if not pool:
             pool = settings.REDIS_STORY_HASH_POOL
-        r = redis.Redis(connection_pool=pool)
-        keys = set()
-        errors = set()
-        prefixes = defaultdict(set)
-        sizes = defaultdict(int)
+        r = redis.Redis(connection_pool=pool)
+        keys = set()
+        errors = set()
+        prefixes = defaultdict(set)
+        sizes = defaultdict(int)
         prefixes_ttls = defaultdict(lambda: defaultdict(int))
-        prefix_re = re.compile(r"(\w+):(.*)")
+        prefix_re = re.compile(r"(\w+):(.*)")
 
-        p = r.pipeline()
+        p = r.pipeline()
         [p.randomkey() for _ in range(sample)]
-        keys = set(p.execute())
+        keys = set(p.execute())
 
-        p = r.pipeline()
+        p = r.pipeline()
         [p.ttl(key) for key in keys]
-        ttls = p.execute()
+        ttls = p.execute()
+
+        dump = [r.execute_command("dump", key) for key in keys]
 
-        dump = [r.execute_command('dump', key) for key in keys]
-        
         for k, key in enumerate(keys):
             match = prefix_re.match(key)
             if not match or dump[k] is None:
@@ -96,39 +95,49 @@ def sample(cls, sample=1000, pool=None):
             prefixes[prefix].add(rest)
             sizes[prefix] += len(dump[k])
             ttl = ttls[k]
-            if ttl < 0: # Never expire
-                prefixes_ttls[prefix]['-'] += 1
+            if ttl < 0:  # Never expire
+                prefixes_ttls[prefix]["-"] += 1
             elif ttl == 0:
-                prefixes_ttls[prefix]['X'] += 1
-            elif ttl < 60*60: # 1 hour
-                prefixes_ttls[prefix]['1h'] += 1
-            elif ttl < 60*60*24:
-                prefixes_ttls[prefix]['1d'] += 1
-            elif ttl < 60*60*24*7:
-                prefixes_ttls[prefix]['1w'] += 1
-            elif ttl < 60*60*24*14:
-                prefixes_ttls[prefix]['2w'] += 1
-            elif ttl < 60*60*24*30:
-                prefixes_ttls[prefix]['4w'] += 1
+                prefixes_ttls[prefix]["X"] += 1
+            elif ttl < 60 * 60:  # 1 hour
+                prefixes_ttls[prefix]["1h"] += 1
+            elif ttl < 60 * 60 * 24:
+                prefixes_ttls[prefix]["1d"] += 1
+            elif ttl < 60 * 60 * 24 * 7:
+                prefixes_ttls[prefix]["1w"] += 1
+            elif ttl < 60 * 60 * 24 * 14:
+                prefixes_ttls[prefix]["2w"] += 1
+            elif ttl < 60 * 60 * 24 * 30:
+                prefixes_ttls[prefix]["4w"] += 1
             else:
-                prefixes_ttls[prefix]['4w+'] += 1
-
+                prefixes_ttls[prefix]["4w+"] += 1
+
         keys_count = len(keys)
        total_size = float(sum([k for k in sizes.values()]))
         print(" ---> %s total keys" % keys_count)
         for prefix, rest in prefixes.items():
             total_expiring = sum([k for p, k in dict(prefixes_ttls[prefix]).items() if p != "-"])
-            print(" ---> %s: (%s keys - %s space) %s keys (%s expiring: %s)" % (str(prefix, 100. * (len(rest) / float(keys_count)))[:4], str(100 * (sizes[prefix] / total_size))[:4], str(len(rest))[:4], total_expiring, dict(prefixes_ttls[prefix])))
+            print(
+                " ---> %s: (%s keys - %s space) %s keys (%s expiring: %s)"
+                % (
+                    prefix,
+                    str(100.0 * (len(rest) / float(keys_count)))[:4],
+                    str(100 * (sizes[prefix] / total_size))[:4],
+                    str(len(rest))[:4],
+                    total_expiring,
+                    dict(prefixes_ttls[prefix]),
+                )
+            )
         print(" ---> %s errors: %s" % (len(errors), errors))
 
+
 def round_time(dt=None, round_to=60):
-    """Round a datetime object to any time laps in seconds
-    dt : datetime.datetime object, default now.
-    round_to : Closest number of seconds to round to, default 1 minute.
-    Author: Thierry Husson 2012 - Use it as you want but don't blame me.
-    """
-    if dt == None : dt = datetime.datetime.now()
-    seconds = (dt - dt.min).seconds
-    rounding = (seconds+round_to/2) // round_to * round_to
-    return dt + datetime.timedelta(0,rounding-seconds,-dt.microsecond)
-
+    """Round a datetime object to any time laps in seconds
+    dt : datetime.datetime object, default now.
+    round_to : Closest number of seconds to round to, default 1 minute.
+    Author: Thierry Husson 2012 - Use it as you want but don't blame me. 
+ """ + if dt == None: + dt = datetime.datetime.now() + seconds = (dt - dt.min).seconds + rounding = (seconds + round_to / 2) // round_to * round_to + return dt + datetime.timedelta(0, rounding - seconds, -dt.microsecond) diff --git a/apps/statistics/tasks.py b/apps/statistics/tasks.py index b05a5108b7..79eb3ada9f 100644 --- a/apps/statistics/tasks.py +++ b/apps/statistics/tasks.py @@ -4,14 +4,13 @@ from utils import log as logging - -@app.task(name='collect-stats') +@app.task(name="collect-stats") def CollectStats(): logging.debug(" ---> ~FBCollecting stats...") MStatistics.collect_statistics() - - -@app.task(name='collect-feedback') + + +@app.task(name="collect-feedback") def CollectFeedback(): logging.debug(" ---> ~FBCollecting feedback...") MFeedback.collect_feedback() diff --git a/apps/statistics/templatetags/statistics_tags.py b/apps/statistics/templatetags/statistics_tags.py index 70015a8429..9961ef9c12 100644 --- a/apps/statistics/templatetags/statistics_tags.py +++ b/apps/statistics/templatetags/statistics_tags.py @@ -3,19 +3,22 @@ register = template.Library() -@register.inclusion_tag('statistics/render_statistics_graphs.xhtml') + +@register.inclusion_tag("statistics/render_statistics_graphs.xhtml") def render_statistics_graphs(statistics): return { - 'statistics': statistics, + "statistics": statistics, } - + + @register.filter def format_graph(n, max_value, height=30): if n == 0 or max_value == 0: return 1 - return max(1, height * (n/float(max_value))) - -@register.inclusion_tag('statistics/render_feedback_table.xhtml') + return max(1, height * (n / float(max_value))) + + +@register.inclusion_tag("statistics/render_feedback_table.xhtml") def render_feedback_table(): feedbacks = MFeedback.all() - return dict(feedbacks=feedbacks) \ No newline at end of file + return dict(feedbacks=feedbacks) diff --git a/apps/statistics/tests.py b/apps/statistics/tests.py index c7c4668e12..f51d798ffd 100644 --- a/apps/statistics/tests.py +++ b/apps/statistics/tests.py @@ -7,6 +7,7 @@ from django.test import TestCase + class SimpleTest(TestCase): def test_basic_addition(self): """ @@ -14,10 +15,12 @@ def test_basic_addition(self): """ self.assertEqual(1 + 1, 2) -__test__ = {"doctest": """ + +__test__ = { + "doctest": """ Another way to test that 1 + 1 is equal to 2. 
>>> 1 + 1 == 2
 True
-"""}
-
+"""
+}
diff --git a/apps/statistics/urls.py b/apps/statistics/urls.py
index ee2ede961d..d0dc668816 100644
--- a/apps/statistics/urls.py
+++ b/apps/statistics/urls.py
@@ -2,8 +2,8 @@
 from apps.statistics import views
 
 urlpatterns = [
-    url(r'^dashboard_graphs', views.dashboard_graphs, name='statistics-graphs'),
-    url(r'^feedback_table', views.feedback_table, name='feedback-table'),
-    url(r'^revenue', views.revenue, name='revenue'),
-    url(r'^slow', views.slow, name='slow'),
+    url(r"^dashboard_graphs", views.dashboard_graphs, name="statistics-graphs"),
+    url(r"^feedback_table", views.feedback_table, name="feedback-table"),
+    url(r"^revenue", views.revenue, name="revenue"),
+    url(r"^slow", views.slow, name="slow"),
 ]
diff --git a/apps/statistics/views.py b/apps/statistics/views.py
index 8d769c07a2..0ce091b6bd 100644
--- a/apps/statistics/views.py
+++ b/apps/statistics/views.py
@@ -17,48 +17,47 @@
 from apps.profile.models import PaymentHistory
 from utils import log as logging
 
+
 def dashboard_graphs(request):
     statistics = MStatistics.all()
-    return render(
-        request,
-        'statistics/render_statistics_graphs.xhtml',
-        {'statistics': statistics}
-    )
+    return render(request, "statistics/render_statistics_graphs.xhtml", {"statistics": statistics})
+
 
 def feedback_table(request):
     feedbacks = MFeedback.all()
-    return render(
-        request,
-        'statistics/render_feedback_table.xhtml',
-        {'feedbacks': feedbacks}
-    )
+    return render(request, "statistics/render_feedback_table.xhtml", {"feedbacks": feedbacks})
+
 
 def revenue(request):
     data = {}
-    data['title'] = "NewsBlur Revenue"
-    data['link'] = "https://www.newsblur.com"
-    data['description'] = "Revenue"
-    data['lastBuildDate'] = datetime.datetime.utcnow()
-    data['generator'] = 'NewsBlur Revenue Writer'
-    data['docs'] = None
+    data["title"] = "NewsBlur Revenue"
+    data["link"] = "https://www.newsblur.com"
+    data["description"] = "Revenue"
+    data["lastBuildDate"] = datetime.datetime.utcnow()
+    data["generator"] = "NewsBlur Revenue Writer"
+    data["docs"] = None
     rss = feedgenerator.Atom1Feed(**data)
-    
+
     report = PaymentHistory.report()
-    content = "%s revenue: $%s<br>%s" % (datetime.datetime.now().strftime('%Y'), report['annual'], report['output'].replace('\n', '<br>'))
-    
+    content = "%s revenue: $%s<br>%s" % (
+        datetime.datetime.now().strftime("%Y"),
+        report["annual"],
+        report["output"].replace("\n", "<br>"),
+    )
+
     story = {
-        'title': "Daily snapshot: %s" % (datetime.datetime.now().strftime('%a %b %-d, %Y')),
-        'link': 'https://www.newsblur.com',
-        'description': content,
-        'unique_id': datetime.datetime.now().strftime('%a %b %-d, %Y'),
-        'pubdate': datetime.datetime.now(),
+        "title": "Daily snapshot: %s" % (datetime.datetime.now().strftime("%a %b %-d, %Y")),
+        "link": "https://www.newsblur.com",
+        "description": content,
+        "unique_id": datetime.datetime.now().strftime("%a %b %-d, %Y"),
+        "pubdate": datetime.datetime.now(),
     }
     rss.add_item(**story)
-    
-    logging.user(request, "~FBGenerating Revenue RSS feed: ~FM%s" % (
-        request.META.get('HTTP_USER_AGENT', "")[:24]
-    ))
-    return HttpResponse(rss.writeString('utf-8'), content_type='application/rss+xml')
+
+    logging.user(
+        request, "~FBGenerating Revenue RSS feed: ~FM%s" % (request.META.get("HTTP_USER_AGENT", "")[:24])
+    )
+    return HttpResponse(rss.writeString("utf-8"), content_type="application/rss+xml")
 
 
 @login_required
@@ -74,8 +73,8 @@ def slow(request):
     user_id_counts = {}
     path_counts = {}
     users = {}
-    
-    for minutes_ago in range(60*6):
+
+    for minutes_ago in range(60 * 6):
         dt_ago = now - datetime.timedelta(minutes=minutes_ago)
         minute = round_time(dt_ago, round_to=60)
         dt_ago_str = minute.strftime("%a %b %-d, %Y %H:%M")
@@ -83,7 +82,7 @@ def slow(request):
         minute_queries = r.lrange(name, 0, -1)
         for query_raw in minute_queries:
             query = pickle.loads(base64.b64decode(query_raw))
-            user_id = query['user_id']
+            user_id = query["user_id"]
             if dt_ago_str not in all_queries:
                 all_queries[dt_ago_str] = []
             if user_id in users:
@@ -97,22 +96,26 @@ def slow(request):
             else:
                 user = AnonymousUser()
             users[user_id] = user
-            query['user'] = user
-            query['datetime'] = minute
+            query["user"] = user
+            query["datetime"] = minute
             all_queries[dt_ago_str].append(query)
             if user_id not in user_id_counts:
                 user_id_counts[user_id] = 0
             user_id_counts[user_id] += 1
-            if query['path'] not in path_counts:
-                path_counts[query['path']] = 0
-            path_counts[query['path']] += 1
+            if query["path"] not in path_counts:
+                path_counts[query["path"]] = 0
+            path_counts[query["path"]] += 1
 
     user_counts = []
     for user_id, count in user_id_counts.items():
-        user_counts.append({'user': users[user_id], 'count': count})
-
-    return render(request, 'statistics/slow.xhtml', {
-        'all_queries': all_queries,
-        'user_counts': user_counts,
-        'path_counts': path_counts,
-    })
+        user_counts.append({"user": users[user_id], "count": count})
+
+    return render(
+        request,
+        "statistics/slow.xhtml",
+        {
+            "all_queries": all_queries,
+            "user_counts": user_counts,
+            "path_counts": path_counts,
+        },
+    )
diff --git a/archive/ansible/do_inventory.py b/archive/ansible/do_inventory.py
index 3cfa63e33a..1766fb19f9 100755
--- a/archive/ansible/do_inventory.py
+++ b/archive/ansible/do_inventory.py
@@ -121,7 +121,8 @@
 #
 # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
 
-from __future__ import (absolute_import, division, print_function)
+from __future__ import absolute_import, division, print_function
+
 __metaclass__ = type
 
 ######################################################################
@@ -145,21 +146,23 @@ class DoManager:
     def __init__(self, api_token):
         self.api_token = api_token
-        self.api_endpoint = 'https://api.digitalocean.com/v2'
-        self.headers = {'Authorization': 'Bearer {0}'.format(self.api_token),
-                        'Content-type': 'application/json'}
+        self.api_endpoint = "https://api.digitalocean.com/v2"
+        self.headers = {
+            "Authorization": "Bearer {0}".format(self.api_token),
+            
"Content-type": "application/json", + } self.timeout = 60 def _url_builder(self, path): - if path[0] == '/': + if path[0] == "/": path = path[1:] - return '%s/%s' % (self.api_endpoint, path) + return "%s/%s" % (self.api_endpoint, path) - def send(self, url, method='GET', data=None): + def send(self, url, method="GET", data=None): url = self._url_builder(url) data = json.dumps(data) try: - if method == 'GET': + if method == "GET": resp_data = {} incomplete = True while incomplete: @@ -173,7 +176,7 @@ def send(self, url, method='GET', data=None): resp_data[key] = value try: - url = json_resp['links']['pages']['next'] + url = json_resp["links"]["pages"]["next"] except KeyError: incomplete = False @@ -182,54 +185,53 @@ def send(self, url, method='GET', data=None): return resp_data def all_active_droplets(self): - resp = self.send('droplets/') - return resp['droplets'] + resp = self.send("droplets/") + return resp["droplets"] def all_regions(self): - resp = self.send('regions/') - return resp['regions'] + resp = self.send("regions/") + return resp["regions"] - def all_images(self, filter_name='global'): - params = {'filter': filter_name} - resp = self.send('images/', data=params) - return resp['images'] + def all_images(self, filter_name="global"): + params = {"filter": filter_name} + resp = self.send("images/", data=params) + return resp["images"] def sizes(self): - resp = self.send('sizes/') - return resp['sizes'] + resp = self.send("sizes/") + return resp["sizes"] def all_ssh_keys(self): - resp = self.send('account/keys') - return resp['ssh_keys'] + resp = self.send("account/keys") + return resp["ssh_keys"] def all_domains(self): - resp = self.send('domains/') - return resp['domains'] + resp = self.send("domains/") + return resp["domains"] def show_droplet(self, droplet_id): - resp = self.send('droplets/%s' % droplet_id) - return resp['droplet'] + resp = self.send("droplets/%s" % droplet_id) + return resp["droplet"] def all_tags(self): - resp = self.send('tags') - return resp['tags'] + resp = self.send("tags") + return resp["tags"] class DigitalOceanInventory(object): - ########################################################################### # Main execution path ########################################################################### def __init__(self): - """Main execution path """ + """Main execution path""" # DigitalOceanInventory data self.data = {} # All DigitalOcean data self.inventory = {} # Ansible Inventory # Define defaults - self.cache_path = '.' + self.cache_path = "." self.cache_max_age = 0 self.use_private_network = False self.group_variables = {} @@ -240,9 +242,11 @@ def __init__(self): self.read_cli_args() # Verify credentials were set - if not hasattr(self, 'api_token'): - msg = 'Could not find values for DigitalOcean api_token. They must be specified via either ini file, ' \ - 'command line argument (--api-token), or environment variables (DO_API_TOKEN)\n' + if not hasattr(self, "api_token"): + msg = ( + "Could not find values for DigitalOcean api_token. 
They must be specified via either ini file, " + "command line argument (--api-token), or environment variables (DO_API_TOKEN)\n" + ) sys.stderr.write(msg) sys.exit(-1) @@ -259,40 +263,40 @@ def __init__(self): self.load_from_cache() if len(self.data) == 0: if self.args.force_cache: - sys.stderr.write('Cache is empty and --force-cache was specified\n') + sys.stderr.write("Cache is empty and --force-cache was specified\n") sys.exit(-1) self.manager = DoManager(self.api_token) # Pick the json_data to print based on the CLI command if self.args.droplets: - self.load_from_digital_ocean('droplets') - json_data = {'droplets': self.data['droplets']} + self.load_from_digital_ocean("droplets") + json_data = {"droplets": self.data["droplets"]} elif self.args.regions: - self.load_from_digital_ocean('regions') - json_data = {'regions': self.data['regions']} + self.load_from_digital_ocean("regions") + json_data = {"regions": self.data["regions"]} elif self.args.images: - self.load_from_digital_ocean('images') - json_data = {'images': self.data['images']} + self.load_from_digital_ocean("images") + json_data = {"images": self.data["images"]} elif self.args.sizes: - self.load_from_digital_ocean('sizes') - json_data = {'sizes': self.data['sizes']} + self.load_from_digital_ocean("sizes") + json_data = {"sizes": self.data["sizes"]} elif self.args.ssh_keys: - self.load_from_digital_ocean('ssh_keys') - json_data = {'ssh_keys': self.data['ssh_keys']} + self.load_from_digital_ocean("ssh_keys") + json_data = {"ssh_keys": self.data["ssh_keys"]} elif self.args.domains: - self.load_from_digital_ocean('domains') - json_data = {'domains': self.data['domains']} + self.load_from_digital_ocean("domains") + json_data = {"domains": self.data["domains"]} elif self.args.tags: - self.load_from_digital_ocean('tags') - json_data = {'tags': self.data['tags']} + self.load_from_digital_ocean("tags") + json_data = {"tags": self.data["tags"]} elif self.args.all: self.load_from_digital_ocean() json_data = self.data elif self.args.host: json_data = self.load_droplet_variables_for_host() - else: # '--list' this is last to make it default - self.load_from_digital_ocean('droplets') + else: # '--list' this is last to make it default + self.load_from_digital_ocean("droplets") self.build_inventory() json_data = self.inventory @@ -309,31 +313,31 @@ def __init__(self): ########################################################################### def read_settings(self): - """ Reads the settings from the digital_ocean.ini file """ + """Reads the settings from the digital_ocean.ini file""" config = ConfigParser.ConfigParser() - config_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'digital_ocean.ini') + config_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "digital_ocean.ini") config.read(config_path) # Credentials - if config.has_option('digital_ocean', 'api_token'): - self.api_token = config.get('digital_ocean', 'api_token') + if config.has_option("digital_ocean", "api_token"): + self.api_token = config.get("digital_ocean", "api_token") # Cache related - if config.has_option('digital_ocean', 'cache_path'): - self.cache_path = config.get('digital_ocean', 'cache_path') - if config.has_option('digital_ocean', 'cache_max_age'): - self.cache_max_age = config.getint('digital_ocean', 'cache_max_age') + if config.has_option("digital_ocean", "cache_path"): + self.cache_path = config.get("digital_ocean", "cache_path") + if config.has_option("digital_ocean", "cache_max_age"): + self.cache_max_age = 
config.getint("digital_ocean", "cache_max_age") # Private IP Address - if config.has_option('digital_ocean', 'use_private_network'): - self.use_private_network = config.getboolean('digital_ocean', 'use_private_network') + if config.has_option("digital_ocean", "use_private_network"): + self.use_private_network = config.getboolean("digital_ocean", "use_private_network") # Group variables - if config.has_option('digital_ocean', 'group_variables'): - self.group_variables = ast.literal_eval(config.get('digital_ocean', 'group_variables')) + if config.has_option("digital_ocean", "group_variables"): + self.group_variables = ast.literal_eval(config.get("digital_ocean", "group_variables")) def read_environment(self): - """ Reads the settings from environment variables """ + """Reads the settings from environment variables""" # Setup credentials if os.getenv("DO_API_TOKEN"): self.api_token = os.getenv("DO_API_TOKEN") @@ -341,31 +345,48 @@ def read_environment(self): self.api_token = os.getenv("DO_API_KEY") def read_cli_args(self): - """ Command line argument processing """ - parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on DigitalOcean credentials') - - parser.add_argument('--list', action='store_true', help='List all active Droplets as Ansible inventory (default: True)') - parser.add_argument('--host', action='store', help='Get all Ansible inventory variables about a specific Droplet') - - parser.add_argument('--all', action='store_true', help='List all DigitalOcean information as JSON') - parser.add_argument('--droplets', '-d', action='store_true', help='List Droplets as JSON') - parser.add_argument('--regions', action='store_true', help='List Regions as JSON') - parser.add_argument('--images', action='store_true', help='List Images as JSON') - parser.add_argument('--sizes', action='store_true', help='List Sizes as JSON') - parser.add_argument('--ssh-keys', action='store_true', help='List SSH keys as JSON') - parser.add_argument('--domains', action='store_true', help='List Domains as JSON') - parser.add_argument('--tags', action='store_true', help='List Tags as JSON') - - parser.add_argument('--pretty', '-p', action='store_true', help='Pretty-print results') - - parser.add_argument('--cache-path', action='store', help='Path to the cache files (default: .)') - parser.add_argument('--cache-max_age', action='store', help='Maximum age of the cached items (default: 0)') - parser.add_argument('--force-cache', action='store_true', default=False, help='Only use data from the cache') - parser.add_argument('--refresh-cache', '-r', action='store_true', default=False, - help='Force refresh of cache by making API requests to DigitalOcean (default: False - use cache files)') - - parser.add_argument('--env', '-e', action='store_true', help='Display DO_API_TOKEN') - parser.add_argument('--api-token', '-a', action='store', help='DigitalOcean API Token') + """Command line argument processing""" + parser = argparse.ArgumentParser( + description="Produce an Ansible Inventory file based on DigitalOcean credentials" + ) + + parser.add_argument( + "--list", + action="store_true", + help="List all active Droplets as Ansible inventory (default: True)", + ) + parser.add_argument( + "--host", action="store", help="Get all Ansible inventory variables about a specific Droplet" + ) + + parser.add_argument("--all", action="store_true", help="List all DigitalOcean information as JSON") + parser.add_argument("--droplets", "-d", action="store_true", help="List Droplets as JSON") + 
parser.add_argument("--regions", action="store_true", help="List Regions as JSON") + parser.add_argument("--images", action="store_true", help="List Images as JSON") + parser.add_argument("--sizes", action="store_true", help="List Sizes as JSON") + parser.add_argument("--ssh-keys", action="store_true", help="List SSH keys as JSON") + parser.add_argument("--domains", action="store_true", help="List Domains as JSON") + parser.add_argument("--tags", action="store_true", help="List Tags as JSON") + + parser.add_argument("--pretty", "-p", action="store_true", help="Pretty-print results") + + parser.add_argument("--cache-path", action="store", help="Path to the cache files (default: .)") + parser.add_argument( + "--cache-max_age", action="store", help="Maximum age of the cached items (default: 0)" + ) + parser.add_argument( + "--force-cache", action="store_true", default=False, help="Only use data from the cache" + ) + parser.add_argument( + "--refresh-cache", + "-r", + action="store_true", + default=False, + help="Force refresh of cache by making API requests to DigitalOcean (default: False - use cache files)", + ) + + parser.add_argument("--env", "-e", action="store_true", help="Display DO_API_TOKEN") + parser.add_argument("--api-token", "-a", action="store", help="DigitalOcean API Token") self.args = parser.parse_args() @@ -373,11 +394,17 @@ def read_cli_args(self): self.api_token = self.args.api_token # Make --list default if none of the other commands are specified - if (not self.args.droplets and not self.args.regions and - not self.args.images and not self.args.sizes and - not self.args.ssh_keys and not self.args.domains and - not self.args.tags and - not self.args.all and not self.args.host): + if ( + not self.args.droplets + and not self.args.regions + and not self.args.images + and not self.args.sizes + and not self.args.ssh_keys + and not self.args.domains + and not self.args.tags + and not self.args.all + and not self.args.host + ): self.args.list = True ########################################################################### @@ -385,117 +412,112 @@ def read_cli_args(self): ########################################################################### def load_from_digital_ocean(self, resource=None): - """Get JSON from DigitalOcean API """ + """Get JSON from DigitalOcean API""" if self.args.force_cache and os.path.isfile(self.cache_filename): return # We always get fresh droplets - if self.is_cache_valid() and not (resource == 'droplets' or resource is None): + if self.is_cache_valid() and not (resource == "droplets" or resource is None): return if self.args.refresh_cache: resource = None - if resource == 'droplets' or resource is None: - self.data['droplets'] = self.manager.all_active_droplets() + if resource == "droplets" or resource is None: + self.data["droplets"] = self.manager.all_active_droplets() self.cache_refreshed = True - if resource == 'regions' or resource is None: - self.data['regions'] = self.manager.all_regions() + if resource == "regions" or resource is None: + self.data["regions"] = self.manager.all_regions() self.cache_refreshed = True - if resource == 'images' or resource is None: - self.data['images'] = self.manager.all_images() + if resource == "images" or resource is None: + self.data["images"] = self.manager.all_images() self.cache_refreshed = True - if resource == 'sizes' or resource is None: - self.data['sizes'] = self.manager.sizes() + if resource == "sizes" or resource is None: + self.data["sizes"] = self.manager.sizes() self.cache_refreshed = True - if 
resource == 'ssh_keys' or resource is None: - self.data['ssh_keys'] = self.manager.all_ssh_keys() + if resource == "ssh_keys" or resource is None: + self.data["ssh_keys"] = self.manager.all_ssh_keys() self.cache_refreshed = True - if resource == 'domains' or resource is None: - self.data['domains'] = self.manager.all_domains() + if resource == "domains" or resource is None: + self.data["domains"] = self.manager.all_domains() self.cache_refreshed = True - if resource == 'tags' or resource is None: - self.data['tags'] = self.manager.all_tags() + if resource == "tags" or resource is None: + self.data["tags"] = self.manager.all_tags() self.cache_refreshed = True def add_inventory_group(self, key): - """ Method to create group dict """ - host_dict = {'hosts': [], 'vars': {}} + """Method to create group dict""" + host_dict = {"hosts": [], "vars": {}} self.inventory[key] = host_dict return def add_host(self, group, host): - """ Helper method to reduce host duplication """ + """Helper method to reduce host duplication""" if group not in self.inventory: self.add_inventory_group(group) - if host not in self.inventory[group]['hosts']: - self.inventory[group]['hosts'].append(host) + if host not in self.inventory[group]["hosts"]: + self.inventory[group]["hosts"].append(host) return def build_inventory(self): - """ Build Ansible inventory of droplets """ - self.inventory = { - 'all': { - 'hosts': [], - 'vars': self.group_variables - }, - '_meta': {'hostvars': {}} - } + """Build Ansible inventory of droplets""" + self.inventory = {"all": {"hosts": [], "vars": self.group_variables}, "_meta": {"hostvars": {}}} # add all droplets by id and name - for droplet in self.data['droplets']: - for net in droplet['networks']['v4']: - if net['type'] == 'public': - dest = net['ip_address'] + for droplet in self.data["droplets"]: + for net in droplet["networks"]["v4"]: + if net["type"] == "public": + dest = net["ip_address"] else: continue - self.inventory['all']['hosts'].append(dest) + self.inventory["all"]["hosts"].append(dest) - self.add_host(droplet['id'], dest) + self.add_host(droplet["id"], dest) - self.add_host(droplet['name'], dest) + self.add_host(droplet["name"], dest) - roledef = re.split(r"([0-9]+)", droplet['name'])[0] + roledef = re.split(r"([0-9]+)", droplet["name"])[0] self.add_host(roledef, dest) # groups that are always present - for group in ('digital_ocean', - 'region_' + droplet['region']['slug'], - 'image_' + str(droplet['image']['id']), - 'size_' + droplet['size']['slug'], - 'distro_' + DigitalOceanInventory.to_safe(droplet['image']['distribution']), - 'status_' + droplet['status']): + for group in ( + "digital_ocean", + "region_" + droplet["region"]["slug"], + "image_" + str(droplet["image"]["id"]), + "size_" + droplet["size"]["slug"], + "distro_" + DigitalOceanInventory.to_safe(droplet["image"]["distribution"]), + "status_" + droplet["status"], + ): # self.add_host(group, dest) pass # groups that are not always present - for group in (droplet['image']['slug'], - droplet['image']['name']): + for group in (droplet["image"]["slug"], droplet["image"]["name"]): if group: - image = 'image_' + DigitalOceanInventory.to_safe(group) + image = "image_" + DigitalOceanInventory.to_safe(group) # self.add_host(image, dest) - if droplet['tags']: - for tag in droplet['tags']: + if droplet["tags"]: + for tag in droplet["tags"]: self.add_host(tag, dest) # hostvars info = self.do_namespace(droplet) - self.inventory['_meta']['hostvars'][dest] = info + self.inventory["_meta"]["hostvars"][dest] = info def 
load_droplet_variables_for_host(self): - """ Generate a JSON response to a --host call """ + """Generate a JSON response to a --host call""" host = int(self.args.host) droplet = self.manager.show_droplet(host) info = self.do_namespace(droplet) - return {'droplet': info} + return {"droplet": info} ########################################################################### # Cache Management ########################################################################### def is_cache_valid(self): - """ Determines if the cache files have expired, or if it is still valid """ + """Determines if the cache files have expired, or if it is still valid""" if os.path.isfile(self.cache_filename): mod_time = os.path.getmtime(self.cache_filename) current_time = time() @@ -504,23 +526,23 @@ def is_cache_valid(self): return False def load_from_cache(self): - """ Reads the data from the cache file and assigns it to member variables as Python Objects """ + """Reads the data from the cache file and assigns it to member variables as Python Objects""" try: - with open(self.cache_filename, 'r') as cache: + with open(self.cache_filename, "r") as cache: json_data = cache.read() data = json.loads(json_data) except IOError: - data = {'data': {}, 'inventory': {}} + data = {"data": {}, "inventory": {}} - self.data = data['data'] - self.inventory = data['inventory'] + self.data = data["data"] + self.inventory = data["inventory"] def write_to_cache(self): - """ Writes data in JSON format to a file """ - data = {'data': self.data, 'inventory': self.inventory} + """Writes data in JSON format to a file""" + data = {"data": self.data, "inventory": self.inventory} json_data = json.dumps(data, indent=2) - with open(self.cache_filename, 'w') as cache: + with open(self.cache_filename, "w") as cache: cache.write(json_data) ########################################################################### @@ -528,15 +550,15 @@ def write_to_cache(self): ########################################################################### @staticmethod def to_safe(word): - """ Converts 'bad' characters in a string to underscores so they can be used as Ansible groups """ + """Converts 'bad' characters in a string to underscores so they can be used as Ansible groups""" return re.sub(r"[^A-Za-z0-9\-.]", "_", word) @staticmethod def do_namespace(data): - """ Returns a copy of the dictionary with all the keys put in a 'do_' namespace """ + """Returns a copy of the dictionary with all the keys put in a 'do_' namespace""" info = {} for k, v in data.items(): - info['do_' + k] = v + info["do_" + k] = v return info diff --git a/archive/fabfile.py b/archive/fabfile.py index 1c35d76f9d..e8415cbfdc 100644 --- a/archive/fabfile.py +++ b/archive/fabfile.py @@ -4,6 +4,7 @@ from fabric.contrib import django from fabric.contrib import files from fabric.state import connections + # from fabric.colors import red, green, blue, cyan, magenta, white, yellow from boto.s3.connection import S3Connection from boto.s3.key import Key @@ -25,7 +26,7 @@ print("Digital Ocean's API not loaded. 
Install python-digitalocean.") -django.settings_module('newsblur_web.settings') +django.settings_module("newsblur_web.settings") try: from django.conf import settings as django_settings except ImportError: @@ -37,10 +38,10 @@ # ============ env.NEWSBLUR_PATH = "/srv/newsblur" -env.SECRETS_PATH = "/srv/secrets-newsblur" -env.VENDOR_PATH = "/srv/code" -env.user = 'sclay' -env.key_filename = os.path.join(env.SECRETS_PATH, 'keys/newsblur.key') +env.SECRETS_PATH = "/srv/secrets-newsblur" +env.VENDOR_PATH = "/srv/code" +env.user = "sclay" +env.key_filename = os.path.join(env.SECRETS_PATH, "keys/newsblur.key") env.connection_attempts = 10 env.do_ip_to_hostname = {} env.colorize_errors = True @@ -50,7 +51,7 @@ # ========= try: - hosts_path = os.path.expanduser(os.path.join(env.SECRETS_PATH, 'configs/hosts.yml')) + hosts_path = os.path.expanduser(os.path.join(env.SECRETS_PATH, "configs/hosts.yml")) roles = yaml.load(open(hosts_path)) for role_name, hosts in list(roles.items()): if isinstance(hosts, dict): @@ -59,11 +60,12 @@ except: print(" ***> No role definitions found in %s. Using default roles." % hosts_path) env.roledefs = { - 'app' : ['app01.newsblur.com'], - 'db' : ['db01.newsblur.com'], - 'task' : ['task01.newsblur.com'], + "app": ["app01.newsblur.com"], + "db": ["db01.newsblur.com"], + "task": ["task01.newsblur.com"], } + def do_roledefs(split=False, debug=False): doapi = digitalocean.Manager(token=django_settings.DO_TOKEN_FABRIC) droplets = doapi.get_all_droplets() @@ -76,7 +78,7 @@ def do_roledefs(split=False, debug=False): if roledef not in hostnames: hostnames[roledef] = [] if droplet.ip_address not in hostnames[roledef]: - hostnames[roledef].append({'name': droplet.name, 'address': droplet.ip_address}) + hostnames[roledef].append({"name": droplet.name, "address": droplet.ip_address}) env.do_ip_to_hostname[droplet.ip_address] = droplet.name if droplet.ip_address not in env.roledefs[roledef]: env.roledefs[roledef].append(droplet.ip_address) @@ -85,6 +87,7 @@ def do_roledefs(split=False, debug=False): return hostnames return droplets + def list_do(): droplets = assign_digitalocean_roledefs(split=True) pprint(droplets) @@ -94,7 +97,7 @@ def list_do(): # for server in group: # if 'address' in server: # print(server['address']) - + doapi = digitalocean.Manager(token=django_settings.DO_TOKEN_FABRIC) droplets = doapi.get_all_droplets() sizes = doapi.get_all_sizes() @@ -103,31 +106,35 @@ def list_do(): total_cost = 0 for droplet in droplets: roledef = re.split(r"([0-9]+)", droplet.name)[0] - cost = droplet.size['price_monthly'] + cost = droplet.size["price_monthly"] role_costs[roledef] += cost total_cost += cost - + print("\n\n Costs:") pprint(dict(role_costs)) print(" ---> Total cost: $%s/month" % total_cost) - + + def host(*names): env.hosts = [] - env.doname = ','.join(names) + env.doname = ",".join(names) hostnames = assign_digitalocean_roledefs(split=True) for role, hosts in list(hostnames.items()): for host in hosts: - if isinstance(host, dict) and host['name'] in names: - env.hosts.append(host['address']) + if isinstance(host, dict) and host["name"] in names: + env.hosts.append(host["address"]) print(" ---> Using %s as hosts" % env.hosts) - + + # ================ # = Environments = # ================ + def server(): env.NEWSBLUR_PATH = "/srv/newsblur" - env.VENDOR_PATH = "/srv/code" + env.VENDOR_PATH = "/srv/code" + def assign_digitalocean_roledefs(split=False): server() @@ -136,66 +143,81 @@ def assign_digitalocean_roledefs(split=False): for roledef, hosts in 
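# --- Editorial example (not part of the patch): the per-role cost roll-up
# that list_do() above performs, with a fabricated droplet list standing in
# for the DigitalOcean API response.
import re
from collections import defaultdict

fake_droplets = [("app01", 40.0), ("app02", 40.0), ("db01", 80.0)]  # (name, price_monthly)
role_costs = defaultdict(float)
for name, monthly in fake_droplets:
    role_costs[re.split(r"([0-9]+)", name)[0]] += monthly
print(dict(role_costs))  # {'app': 80.0, 'db': 80.0}
print(" ---> Total cost: $%s/month" % sum(role_costs.values()))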
list(env.roledefs.items()): if roledef not in droplets: droplets[roledef] = hosts - + return droplets + def app(): assign_digitalocean_roledefs() - env.roles = ['app'] + env.roles = ["app"] + def web(): assign_digitalocean_roledefs() - env.roles = ['app', 'push', 'work', 'search'] + env.roles = ["app", "push", "work", "search"] + def work(): assign_digitalocean_roledefs() - env.roles = ['work'] + env.roles = ["work"] + def www(): assign_digitalocean_roledefs() - env.roles = ['www'] + env.roles = ["www"] + def dev(): assign_digitalocean_roledefs() - env.roles = ['dev'] + env.roles = ["dev"] + def debug(): assign_digitalocean_roledefs() - env.roles = ['debug'] + env.roles = ["debug"] + def node(): assign_digitalocean_roledefs() - env.roles = ['node'] + env.roles = ["node"] + def push(): assign_digitalocean_roledefs() - env.roles = ['push'] + env.roles = ["push"] + def db(): assign_digitalocean_roledefs() - env.roles = ['db', 'search'] + env.roles = ["db", "search"] + def task(): assign_digitalocean_roledefs() - env.roles = ['task'] + env.roles = ["task"] + def ec2task(): ec2() - env.roles = ['ec2task'] + env.roles = ["ec2task"] + def ec2(): - env.user = 'ubuntu' - env.key_filename = ['/Users/sclay/.ec2/sclay.pem'] + env.user = "ubuntu" + env.key_filename = ["/Users/sclay/.ec2/sclay.pem"] assign_digitalocean_roledefs() + def all(): assign_digitalocean_roledefs() - env.roles = ['app', 'db', 'debug', 'node', 'push', 'work', 'www', 'search'] + env.roles = ["app", "db", "debug", "node", "push", "work", "www", "search"] + # ============= # = Bootstrap = # ============= + def setup_common(): setup_installs() change_shell() @@ -224,17 +246,19 @@ def setup_common(): setup_nginx() setup_munin() + def setup_all(): setup_common() setup_app(skip_common=True) setup_db(skip_common=True) setup_task(skip_common=True) + def setup_app_docker(skip_common=False): if not skip_common: setup_common() setup_app_firewall() - setup_motd('app') + setup_motd("app") change_shell() setup_user() @@ -248,13 +272,14 @@ def setup_app_docker(skip_common=False): setup_docker() done() - sudo('reboot') + sudo("reboot") + def setup_app(skip_common=False, node=False): if not skip_common: setup_common() setup_app_firewall() - setup_motd('app') + setup_motd("app") copy_app_settings() config_nginx() setup_gunicorn(supervisor=True) @@ -264,7 +289,8 @@ def setup_app(skip_common=False, node=False): config_monit_app() setup_usage_monitor() done() - sudo('reboot') + sudo("reboot") + def setup_app_image(): copy_app_settings() @@ -274,17 +300,19 @@ def setup_app_image(): pip() deploy_web() done() - sudo('reboot') + sudo("reboot") + def setup_node(): setup_node_app() config_node(full=True) - + + def setup_db(engine=None, skip_common=False, skip_benchmark=False): if not skip_common: setup_common() setup_db_firewall() - setup_motd('db') + setup_motd("db") copy_db_settings() if engine == "postgres": setup_postgres(standby=False) @@ -316,18 +344,20 @@ def setup_db(engine=None, skip_common=False, skip_benchmark=False): # if env.user == 'ubuntu': # setup_db_mdadm() + def setup_task(queue=None, skip_common=False): if not skip_common: setup_common() setup_task_firewall() - setup_motd('task') + setup_motd("task") copy_task_settings() enable_celery_supervisor(queue) setup_gunicorn(supervisor=False) config_monit_task() setup_usage_monitor() done() - sudo('reboot') + sudo("reboot") + def setup_task_image(): setup_installs() @@ -338,198 +368,229 @@ def setup_task_image(): pip() deploy(reload=True) done() - sudo('reboot') + sudo("reboot") + # 
================== # = Setup - Docker = # ================== + def setup_docker(): packages = [ - 'build-essential', + "build-essential", ] - sudo('DEBIAN_FRONTEND=noninteractive apt-get -y --force-yes -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" install %s' % ' '.join(packages)) + sudo( + 'DEBIAN_FRONTEND=noninteractive apt-get -y --force-yes -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" install %s' + % " ".join(packages) + ) - sudo('apt install -fy docker docker-compose') - sudo('usermod -aG docker ${USER}') - sudo('su - ${USER}') + sudo("apt install -fy docker docker-compose") + sudo("usermod -aG docker ${USER}") + sudo("su - ${USER}") copy_certificates() - + + # ================== # = Setup - Common = # ================== + def done(): print("\n\n\n\n-----------------------------------------------------") - print("\n\n %s / %s IS SUCCESSFULLY BOOTSTRAPPED" % (env.get('doname') or env.host_string, env.host_string)) + print( + "\n\n %s / %s IS SUCCESSFULLY BOOTSTRAPPED" + % (env.get("doname") or env.host_string, env.host_string) + ) print("\n\n-----------------------------------------------------\n\n\n\n") + def setup_installs(): packages = [ - 'build-essential', - 'gcc', - 'scons', - 'libreadline-dev', - 'sysstat', - 'iotop', - 'git', - 'python2', - 'python2.7-dev', - 'locate', - 'software-properties-common', - 'libpcre3-dev', - 'libncurses5-dev', - 'libdbd-pg-perl', - 'libssl-dev', - 'libffi-dev', - 'libevent-dev', - 'make', - 'postgresql-common', - 'ssl-cert', - 'python-setuptools', - 'libyaml-0-2', - 'pgbouncer', - 'python-yaml', - 'python-numpy', - 'curl', - 'monit', - 'ufw', - 'libjpeg8', - 'libjpeg62-dev', - 'libfreetype6', - 'libfreetype6-dev', - 'libmysqlclient-dev', - 'libblas-dev', - 'liblapack-dev', - 'libatlas-base-dev', - 'gfortran', - 'libpq-dev', + "build-essential", + "gcc", + "scons", + "libreadline-dev", + "sysstat", + "iotop", + "git", + "python2", + "python2.7-dev", + "locate", + "software-properties-common", + "libpcre3-dev", + "libncurses5-dev", + "libdbd-pg-perl", + "libssl-dev", + "libffi-dev", + "libevent-dev", + "make", + "postgresql-common", + "ssl-cert", + "python-setuptools", + "libyaml-0-2", + "pgbouncer", + "python-yaml", + "python-numpy", + "curl", + "monit", + "ufw", + "libjpeg8", + "libjpeg62-dev", + "libfreetype6", + "libfreetype6-dev", + "libmysqlclient-dev", + "libblas-dev", + "liblapack-dev", + "libatlas-base-dev", + "gfortran", + "libpq-dev", ] # sudo("sed -i -e 's/archive.ubuntu.com\|security.ubuntu.com/old-releases.ubuntu.com/g' /etc/apt/sources.list") put("config/apt_sources.conf", "/etc/apt/sources.list", use_sudo=True) - run('sleep 10') # Dies on a lock, so just delay - sudo('apt-get -y update') - run('sleep 10') # Dies on a lock, so just delay - sudo('DEBIAN_FRONTEND=noninteractive apt-get -y -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" dist-upgrade') - run('sleep 10') # Dies on a lock, so just delay - sudo('DEBIAN_FRONTEND=noninteractive apt-get -y -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" install %s' % ' '.join(packages)) - + run("sleep 10") # Dies on a lock, so just delay + sudo("apt-get -y update") + run("sleep 10") # Dies on a lock, so just delay + sudo( + 'DEBIAN_FRONTEND=noninteractive apt-get -y -o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" dist-upgrade' + ) + run("sleep 10") # Dies on a lock, so just delay + sudo( + 'DEBIAN_FRONTEND=noninteractive apt-get -y -o 
Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" install %s' + % " ".join(packages) + ) + with settings(warn_only=True): sudo("ln -s /usr/lib/x86_64-linux-gnu/libjpeg.so /usr/lib") sudo("ln -s /usr/lib/x86_64-linux-gnu/libfreetype.so /usr/lib") sudo("ln -s /usr/lib/x86_64-linux-gnu/libz.so /usr/lib") - + with settings(warn_only=True): - sudo('mkdir -p %s' % env.VENDOR_PATH) - sudo('chown %s.%s %s' % (env.user, env.user, env.VENDOR_PATH)) + sudo("mkdir -p %s" % env.VENDOR_PATH) + sudo("chown %s.%s %s" % (env.user, env.user, env.VENDOR_PATH)) + def change_shell(): - sudo('apt-get -fy install zsh') + sudo("apt-get -fy install zsh") with settings(warn_only=True): - run('git clone git://github.com/robbyrussell/oh-my-zsh.git ~/.oh-my-zsh') - run('git clone https://github.com/zsh-users/zsh-syntax-highlighting.git ${ZSH_CUSTOM:-~/.oh-my-zsh/custom}/plugins/zsh-syntax-highlighting') - sudo('chsh %s -s /bin/zsh' % env.user) + run("git clone git://github.com/robbyrussell/oh-my-zsh.git ~/.oh-my-zsh") + run( + "git clone https://github.com/zsh-users/zsh-syntax-highlighting.git ${ZSH_CUSTOM:-~/.oh-my-zsh/custom}/plugins/zsh-syntax-highlighting" + ) + sudo("chsh %s -s /bin/zsh" % env.user) + def setup_user(): # run('useradd -c "NewsBlur" -m newsblur -s /bin/zsh') # run('openssl rand -base64 8 | tee -a ~conesus/.password | passwd -stdin conesus') - run('mkdir -p ~/.ssh && chmod 700 ~/.ssh') - run('rm -fr ~/.ssh/id_dsa*') + run("mkdir -p ~/.ssh && chmod 700 ~/.ssh") + run("rm -fr ~/.ssh/id_dsa*") run('ssh-keygen -t dsa -f ~/.ssh/id_dsa -N ""') - run('touch ~/.ssh/authorized_keys') + run("touch ~/.ssh/authorized_keys") put("~/.ssh/id_dsa.pub", "authorized_keys") - run("echo \"\n\" >> ~sclay/.ssh/authorized_keys") - run('echo `cat authorized_keys` >> ~sclay/.ssh/authorized_keys') - run('rm authorized_keys') - -def copy_ssh_keys(username='sclay', private=False): - sudo('mkdir -p ~%s/.ssh' % username) - - put(os.path.join(env.SECRETS_PATH, 'keys/newsblur.key.pub'), 'local.key.pub') - sudo('mv local.key.pub ~%s/.ssh/id_rsa.pub' % username) + run('echo "\n" >> ~sclay/.ssh/authorized_keys') + run("echo `cat authorized_keys` >> ~sclay/.ssh/authorized_keys") + run("rm authorized_keys") + + +def copy_ssh_keys(username="sclay", private=False): + sudo("mkdir -p ~%s/.ssh" % username) + + put(os.path.join(env.SECRETS_PATH, "keys/newsblur.key.pub"), "local.key.pub") + sudo("mv local.key.pub ~%s/.ssh/id_rsa.pub" % username) if private: - put(os.path.join(env.SECRETS_PATH, 'keys/newsblur.key'), 'local.key') - sudo('mv local.key ~%s/.ssh/id_rsa' % username) - - sudo("echo \"\n\" >> ~%s/.ssh/authorized_keys" % username) + put(os.path.join(env.SECRETS_PATH, "keys/newsblur.key"), "local.key") + sudo("mv local.key ~%s/.ssh/id_rsa" % username) + + sudo('echo "\n" >> ~%s/.ssh/authorized_keys' % username) sudo("echo `cat ~%s/.ssh/id_rsa.pub` >> ~%s/.ssh/authorized_keys" % (username, username)) - sudo('chown -R %s.%s ~%s/.ssh' % (username, username, username)) - sudo('chmod 700 ~%s/.ssh' % username) - sudo('chmod 600 ~%s/.ssh/id_rsa*' % username) + sudo("chown -R %s.%s ~%s/.ssh" % (username, username, username)) + sudo("chmod 700 ~%s/.ssh" % username) + sudo("chmod 600 ~%s/.ssh/id_rsa*" % username) + def setup_repo(): - sudo('mkdir -p /srv') - sudo('chown -R %s.%s /srv' % (env.user, env.user)) + sudo("mkdir -p /srv") + sudo("chown -R %s.%s /srv" % (env.user, env.user)) with settings(warn_only=True): - run('git clone https://github.com/samuelclay/NewsBlur.git %s' % env.NEWSBLUR_PATH) + run("git clone 
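# --- Editorial suggestion (hypothetical helper, not in the patch): the long
# non-interactive apt-get line above is repeated in setup_docker() and
# setup_installs(); a small Fabric helper would keep it in one place.
def apt_install(packages):
    sudo(
        "DEBIAN_FRONTEND=noninteractive apt-get -y "
        '-o Dpkg::Options::="--force-confdef" -o Dpkg::Options::="--force-confold" '
        "install %s" % " ".join(packages)
    )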
https://github.com/samuelclay/NewsBlur.git %s" % env.NEWSBLUR_PATH) with settings(warn_only=True): - sudo('ln -sfn /srv/code /home/%s/code' % env.user) - sudo('ln -sfn /srv/newsblur /home/%s/newsblur' % env.user) + sudo("ln -sfn /srv/code /home/%s/code" % env.user) + sudo("ln -sfn /srv/newsblur /home/%s/newsblur" % env.user) + def setup_repo_local_settings(): with virtualenv(): - run('cp newsblur/local_settings.py.template newsblur/local_settings.py') - run('mkdir -p logs') - run('touch logs/newsblur.log') + run("cp newsblur/local_settings.py.template newsblur/local_settings.py") + run("mkdir -p logs") + run("touch logs/newsblur.log") + def setup_local_files(): - run('mkdir -p ~/.config/procps') + run("mkdir -p ~/.config/procps") put("config/toprc", "~/.config/procps/toprc") - run('rm -f ~/.toprc') + run("rm -f ~/.toprc") put("config/zshrc", "~/.zshrc") - put('config/gitconfig.txt', '~/.gitconfig') - put('config/ssh.conf', '~/.ssh/config') + put("config/gitconfig.txt", "~/.gitconfig") + put("config/ssh.conf", "~/.ssh/config") + def setup_psql_client(): - sudo('apt-get -y install postgresql-client') - sudo('mkdir -p /var/run/postgresql') + sudo("apt-get -y install postgresql-client") + sudo("mkdir -p /var/run/postgresql") with settings(warn_only=True): - sudo('chown postgres.postgres /var/run/postgresql') + sudo("chown postgres.postgres /var/run/postgresql") + def setup_libxml(): - sudo('apt-get -y install libxml2-dev libxslt1-dev python-lxml') + sudo("apt-get -y install libxml2-dev libxslt1-dev python-lxml") + def setup_libxml_code(): with cd(env.VENDOR_PATH): - run('git clone git://git.gnome.org/libxml2') - run('git clone git://git.gnome.org/libxslt') + run("git clone git://git.gnome.org/libxml2") + run("git clone git://git.gnome.org/libxslt") + + with cd(os.path.join(env.VENDOR_PATH, "libxml2")): + run("./configure && make && sudo make install") - with cd(os.path.join(env.VENDOR_PATH, 'libxml2')): - run('./configure && make && sudo make install') + with cd(os.path.join(env.VENDOR_PATH, "libxslt")): + run("./configure && make && sudo make install") - with cd(os.path.join(env.VENDOR_PATH, 'libxslt')): - run('./configure && make && sudo make install') def setup_psycopg(): - sudo('easy_install -U psycopg2') + sudo("easy_install -U psycopg2") + def setup_virtualenv(): - sudo('rm -fr ~/.cache') # Clean `sudo pip` - sudo('pip install --upgrade virtualenv') - sudo('pip install --upgrade virtualenvwrapper') + sudo("rm -fr ~/.cache") # Clean `sudo pip` + sudo("pip install --upgrade virtualenv") + sudo("pip install --upgrade virtualenvwrapper") setup_local_files() - with prefix('WORKON_HOME=%s' % os.path.join(env.NEWSBLUR_PATH, 'venv')): - with prefix('source /usr/local/bin/virtualenvwrapper.sh'): + with prefix("WORKON_HOME=%s" % os.path.join(env.NEWSBLUR_PATH, "venv")): + with prefix("source /usr/local/bin/virtualenvwrapper.sh"): with cd(env.NEWSBLUR_PATH): # sudo('rmvirtualenv newsblur') # sudo('rm -fr venv') with settings(warn_only=True): - run('mkvirtualenv newsblur') + run("mkvirtualenv newsblur") # run('echo "import sys; sys.setdefaultencoding(\'utf-8\')" | sudo tee venv/newsblur/lib/python2.7/sitecustomize.py') # run('echo "/srv/newsblur" | sudo tee venv/newsblur/lib/python2.7/site-packages/newsblur.pth') - + + @_contextmanager def virtualenv(): - with prefix('WORKON_HOME=%s' % os.path.join(env.NEWSBLUR_PATH, 'venv')): - with prefix('source /usr/local/bin/virtualenvwrapper.sh'): + with prefix("WORKON_HOME=%s" % os.path.join(env.NEWSBLUR_PATH, "venv")): + with prefix("source 
/usr/local/bin/virtualenvwrapper.sh"): with cd(env.NEWSBLUR_PATH): - with prefix('workon newsblur'): + with prefix("workon newsblur"): yield + def setup_pip(): with cd(env.VENDOR_PATH), settings(warn_only=True): - run('curl https://bootstrap.pypa.io/2.6/get-pip.py | sudo python2') + run("curl https://bootstrap.pypa.io/2.6/get-pip.py | sudo python2") # sudo('python2 get-pip.py') @@ -541,18 +602,19 @@ def pip(): with virtualenv(): if role == "task": with settings(warn_only=True): - sudo('fallocate -l 4G /swapfile') - sudo('chmod 600 /swapfile') - sudo('mkswap /swapfile') - sudo('swapon /swapfile') - sudo('chown %s.%s -R %s' % (env.user, env.user, os.path.join(env.NEWSBLUR_PATH, 'venv'))) + sudo("fallocate -l 4G /swapfile") + sudo("chmod 600 /swapfile") + sudo("mkswap /swapfile") + sudo("swapon /swapfile") + sudo("chown %s.%s -R %s" % (env.user, env.user, os.path.join(env.NEWSBLUR_PATH, "venv"))) # run('easy_install -U pip') # run('pip install --upgrade pip') # run('pip install --upgrade setuptools') - run('pip install -r requirements.txt') + run("pip install -r requirements.txt") if role == "task": with settings(warn_only=True): - sudo('swapoff /swapfile') + sudo("swapoff /swapfile") + def solo_pip(role): if role == "app": @@ -564,170 +626,195 @@ def solo_pip(role): copy_task_settings() pip() celery() - + + def setup_supervisor(): - sudo('apt-get update') - sudo('apt-get -y install supervisor') - put('config/supervisord.conf', '/etc/supervisor/supervisord.conf', use_sudo=True) - sudo('/etc/init.d/supervisor stop') - sudo('sleep 2') - sudo('ulimit -n 100000 && /etc/init.d/supervisor start') + sudo("apt-get update") + sudo("apt-get -y install supervisor") + put("config/supervisord.conf", "/etc/supervisor/supervisord.conf", use_sudo=True) + sudo("/etc/init.d/supervisor stop") + sudo("sleep 2") + sudo("ulimit -n 100000 && /etc/init.d/supervisor start") sudo("/usr/sbin/update-rc.d -f supervisor defaults") - sudo('systemctl enable supervisor') - sudo('systemctl start supervisor') + sudo("systemctl enable supervisor") + sudo("systemctl start supervisor") + @parallel def setup_hosts(): - put(os.path.join(env.SECRETS_PATH, 'configs/hosts'), '/etc/hosts', use_sudo=True) + put(os.path.join(env.SECRETS_PATH, "configs/hosts"), "/etc/hosts", use_sudo=True) sudo('echo "\n\n127.0.0.1 `hostname`" | sudo tee -a /etc/hosts') + def setup_pgbouncer(): - sudo('apt-get remove -y pgbouncer') - sudo('apt-get install -y libevent-dev pkg-config libc-ares2 libc-ares-dev') - PGBOUNCER_VERSION = '1.15.0' + sudo("apt-get remove -y pgbouncer") + sudo("apt-get install -y libevent-dev pkg-config libc-ares2 libc-ares-dev") + PGBOUNCER_VERSION = "1.15.0" with cd(env.VENDOR_PATH), settings(warn_only=True): - run('wget https://pgbouncer.github.io/downloads/files/%s/pgbouncer-%s.tar.gz' % (PGBOUNCER_VERSION, PGBOUNCER_VERSION)) - run('tar -xzf pgbouncer-%s.tar.gz' % PGBOUNCER_VERSION) - run('rm pgbouncer-%s.tar.gz' % PGBOUNCER_VERSION) - with cd('pgbouncer-%s' % PGBOUNCER_VERSION): - run('./configure --prefix=/usr/local') - run('make') - sudo('make install') - sudo('ln -s /usr/local/bin/pgbouncer /usr/sbin/pgbouncer') + run( + "wget https://pgbouncer.github.io/downloads/files/%s/pgbouncer-%s.tar.gz" + % (PGBOUNCER_VERSION, PGBOUNCER_VERSION) + ) + run("tar -xzf pgbouncer-%s.tar.gz" % PGBOUNCER_VERSION) + run("rm pgbouncer-%s.tar.gz" % PGBOUNCER_VERSION) + with cd("pgbouncer-%s" % PGBOUNCER_VERSION): + run("./configure --prefix=/usr/local") + run("make") + sudo("make install") + sudo("ln -s /usr/local/bin/pgbouncer 
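# --- Editorial example (hypothetical task, not in the patch): how the
# virtualenv() context manager defined above is used -- anything inside it
# runs with WORKON_HOME set, virtualenvwrapper sourced, the cwd at
# env.NEWSBLUR_PATH, and the "newsblur" env activated.
def collectstatic():
    with virtualenv():
        run("python manage.py collectstatic --noinput")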
/usr/sbin/pgbouncer") config_pgbouncer() - + + def config_pgbouncer(): - sudo('mkdir -p /etc/pgbouncer') - put('config/pgbouncer.conf', 'pgbouncer.conf') - sudo('mv pgbouncer.conf /etc/pgbouncer/pgbouncer.ini') - put(os.path.join(env.SECRETS_PATH, 'configs/pgbouncer_auth.conf'), 'userlist.txt') - sudo('mv userlist.txt /etc/pgbouncer/userlist.txt') + sudo("mkdir -p /etc/pgbouncer") + put("config/pgbouncer.conf", "pgbouncer.conf") + sudo("mv pgbouncer.conf /etc/pgbouncer/pgbouncer.ini") + put(os.path.join(env.SECRETS_PATH, "configs/pgbouncer_auth.conf"), "userlist.txt") + sudo("mv userlist.txt /etc/pgbouncer/userlist.txt") sudo('echo "START=1" | sudo tee /etc/default/pgbouncer') # sudo('su postgres -c "/etc/init.d/pgbouncer stop"', pty=False) with settings(warn_only=True): - sudo('/etc/init.d/pgbouncer stop') - sudo('pkill -9 pgbouncer -e') - run('sleep 2') - sudo('/etc/init.d/pgbouncer start', pty=False) + sudo("/etc/init.d/pgbouncer stop") + sudo("pkill -9 pgbouncer -e") + run("sleep 2") + sudo("/etc/init.d/pgbouncer start", pty=False) + @parallel def kill_pgbouncer(stop=False): # sudo('su postgres -c "/etc/init.d/pgbouncer stop"', pty=False) with settings(warn_only=True): - sudo('/etc/init.d/pgbouncer stop') - run('sleep 2') - sudo('rm /var/log/postgresql/pgbouncer.pid') + sudo("/etc/init.d/pgbouncer stop") + run("sleep 2") + sudo("rm /var/log/postgresql/pgbouncer.pid") with settings(warn_only=True): - sudo('pkill -9 pgbouncer') - run('sleep 2') + sudo("pkill -9 pgbouncer") + run("sleep 2") if not stop: - run('sudo /etc/init.d/pgbouncer start', pty=False) + run("sudo /etc/init.d/pgbouncer start", pty=False) + def config_monit_task(): - put('config/monit_task.conf', '/etc/monit/conf.d/celery.conf', use_sudo=True) + put("config/monit_task.conf", "/etc/monit/conf.d/celery.conf", use_sudo=True) sudo('echo "START=yes" | sudo tee /etc/default/monit') - sudo('/etc/init.d/monit restart') + sudo("/etc/init.d/monit restart") + def config_monit_node(): - put('config/monit_node.conf', '/etc/monit/conf.d/node.conf', use_sudo=True) + put("config/monit_node.conf", "/etc/monit/conf.d/node.conf", use_sudo=True) sudo('echo "START=yes" | sudo tee /etc/default/monit') - sudo('/etc/init.d/monit restart') + sudo("/etc/init.d/monit restart") + def config_monit_original(): - put('config/monit_original.conf', '/etc/monit/conf.d/node_original.conf', use_sudo=True) + put("config/monit_original.conf", "/etc/monit/conf.d/node_original.conf", use_sudo=True) sudo('echo "START=yes" | sudo tee /etc/default/monit') - sudo('/etc/init.d/monit restart') + sudo("/etc/init.d/monit restart") + def config_monit_app(): - put('config/monit_app.conf', '/etc/monit/conf.d/gunicorn.conf', use_sudo=True) + put("config/monit_app.conf", "/etc/monit/conf.d/gunicorn.conf", use_sudo=True) sudo('echo "START=yes" | sudo tee /etc/default/monit') - sudo('/etc/init.d/monit restart') + sudo("/etc/init.d/monit restart") + def config_monit_work(): - put('config/monit_work.conf', '/etc/monit/conf.d/work.conf', use_sudo=True) + put("config/monit_work.conf", "/etc/monit/conf.d/work.conf", use_sudo=True) sudo('echo "START=yes" | sudo tee /etc/default/monit') - sudo('/etc/init.d/monit restart') + sudo("/etc/init.d/monit restart") + def config_monit_redis(): - sudo('chown root.root /etc/init.d/redis') - sudo('chmod a+x /etc/init.d/redis') - put('config/monit_debug.sh', '/etc/monit/monit_debug.sh', use_sudo=True) - sudo('chmod a+x /etc/monit/monit_debug.sh') - put('config/monit_redis.conf', '/etc/monit/conf.d/redis.conf', use_sudo=True) + sudo("chown 
root.root /etc/init.d/redis") + sudo("chmod a+x /etc/init.d/redis") + put("config/monit_debug.sh", "/etc/monit/monit_debug.sh", use_sudo=True) + sudo("chmod a+x /etc/monit/monit_debug.sh") + put("config/monit_redis.conf", "/etc/monit/conf.d/redis.conf", use_sudo=True) sudo('echo "START=yes" | sudo tee /etc/default/monit') - sudo('/etc/init.d/monit restart') + sudo("/etc/init.d/monit restart") + def setup_mongoengine_repo(): with cd(env.VENDOR_PATH), settings(warn_only=True): - run('rm -fr mongoengine') - run('git clone https://github.com/MongoEngine/mongoengine.git') - sudo('rm -fr /usr/local/lib/python2.7/dist-packages/mongoengine') - sudo('rm -fr /usr/local/lib/python2.7/dist-packages/mongoengine-*') - sudo('ln -sfn %s /usr/local/lib/python2.7/dist-packages/mongoengine' % - os.path.join(env.VENDOR_PATH, 'mongoengine/mongoengine')) - with cd(os.path.join(env.VENDOR_PATH, 'mongoengine')), settings(warn_only=True): - run('git co v0.8.2') + run("rm -fr mongoengine") + run("git clone https://github.com/MongoEngine/mongoengine.git") + sudo("rm -fr /usr/local/lib/python2.7/dist-packages/mongoengine") + sudo("rm -fr /usr/local/lib/python2.7/dist-packages/mongoengine-*") + sudo( + "ln -sfn %s /usr/local/lib/python2.7/dist-packages/mongoengine" + % os.path.join(env.VENDOR_PATH, "mongoengine/mongoengine") + ) + with cd(os.path.join(env.VENDOR_PATH, "mongoengine")), settings(warn_only=True): + run("git co v0.8.2") + def clear_pymongo_repo(): - sudo('rm -fr /usr/local/lib/python2.7/dist-packages/pymongo*') - sudo('rm -fr /usr/local/lib/python2.7/dist-packages/bson*') - sudo('rm -fr /usr/local/lib/python2.7/dist-packages/gridfs*') - + sudo("rm -fr /usr/local/lib/python2.7/dist-packages/pymongo*") + sudo("rm -fr /usr/local/lib/python2.7/dist-packages/bson*") + sudo("rm -fr /usr/local/lib/python2.7/dist-packages/gridfs*") + + def setup_pymongo_repo(): with cd(env.VENDOR_PATH), settings(warn_only=True): - run('git clone git://github.com/mongodb/mongo-python-driver.git pymongo') + run("git clone git://github.com/mongodb/mongo-python-driver.git pymongo") # with cd(os.path.join(env.VENDOR_PATH, 'pymongo')): # sudo('python setup.py install') clear_pymongo_repo() - sudo('ln -sfn %s /usr/local/lib/python2.7/dist-packages/' % - os.path.join(env.VENDOR_PATH, 'pymongo/{pymongo,bson,gridfs}')) + sudo( + "ln -sfn %s /usr/local/lib/python2.7/dist-packages/" + % os.path.join(env.VENDOR_PATH, "pymongo/{pymongo,bson,gridfs}") + ) + def setup_forked_mongoengine(): - with cd(os.path.join(env.VENDOR_PATH, 'mongoengine')), settings(warn_only=True): - run('git remote add clay https://github.com/samuelclay/mongoengine.git') - run('git pull') - run('git fetch clay') - run('git checkout -b clay_master clay/master') + with cd(os.path.join(env.VENDOR_PATH, "mongoengine")), settings(warn_only=True): + run("git remote add clay https://github.com/samuelclay/mongoengine.git") + run("git pull") + run("git fetch clay") + run("git checkout -b clay_master clay/master") + def switch_forked_mongoengine(): - with cd(os.path.join(env.VENDOR_PATH, 'mongoengine')): - run('git co dev') - run('git pull %s dev --force' % env.user) + with cd(os.path.join(env.VENDOR_PATH, "mongoengine")): + run("git co dev") + run("git pull %s dev --force" % env.user) # run('git checkout .') # run('git checkout master') # run('get branch -D dev') # run('git checkout -b dev origin/dev') + def setup_logrotate(clear=True): if clear: - run('find /srv/newsblur/logs/*.log | xargs tee') + run("find /srv/newsblur/logs/*.log | xargs tee") with settings(warn_only=True): - 
sudo('find /var/log/mongodb/*.log | xargs tee') - put('config/logrotate.conf', '/etc/logrotate.d/newsblur', use_sudo=True) - put('config/logrotate.mongo.conf', '/etc/logrotate.d/mongodb', use_sudo=True) - put('config/logrotate.nginx.conf', '/etc/logrotate.d/nginx', use_sudo=True) - sudo('chown root.root /etc/logrotate.d/{newsblur,mongodb,nginx}') - sudo('chmod 644 /etc/logrotate.d/{newsblur,mongodb,nginx}') + sudo("find /var/log/mongodb/*.log | xargs tee") + put("config/logrotate.conf", "/etc/logrotate.d/newsblur", use_sudo=True) + put("config/logrotate.mongo.conf", "/etc/logrotate.d/mongodb", use_sudo=True) + put("config/logrotate.nginx.conf", "/etc/logrotate.d/nginx", use_sudo=True) + sudo("chown root.root /etc/logrotate.d/{newsblur,mongodb,nginx}") + sudo("chmod 644 /etc/logrotate.d/{newsblur,mongodb,nginx}") with settings(warn_only=True): - sudo('chown sclay.sclay /srv/newsblur/logs/*.log') - sudo('logrotate -f /etc/logrotate.d/newsblur') - sudo('logrotate -f /etc/logrotate.d/nginx') - sudo('logrotate -f /etc/logrotate.d/mongodb') + sudo("chown sclay.sclay /srv/newsblur/logs/*.log") + sudo("logrotate -f /etc/logrotate.d/newsblur") + sudo("logrotate -f /etc/logrotate.d/nginx") + sudo("logrotate -f /etc/logrotate.d/mongodb") + def setup_ulimit(): # Increase File Descriptor limits. - run('export FILEMAX=`sysctl -n fs.file-max`', pty=False) - sudo('mv /etc/security/limits.conf /etc/security/limits.conf.bak', pty=False) - sudo('touch /etc/security/limits.conf', pty=False) + run("export FILEMAX=`sysctl -n fs.file-max`", pty=False) + sudo("mv /etc/security/limits.conf /etc/security/limits.conf.bak", pty=False) + sudo("touch /etc/security/limits.conf", pty=False) run('echo "root soft nofile 100000\n" | sudo tee -a /etc/security/limits.conf', pty=False) run('echo "root hard nofile 100000\n" | sudo tee -a /etc/security/limits.conf', pty=False) run('echo "* soft nofile 100000\n" | sudo tee -a /etc/security/limits.conf', pty=False) run('echo "* hard nofile 100090\n" | sudo tee -a /etc/security/limits.conf', pty=False) run('echo "fs.file-max = 100000\n" | sudo tee -a /etc/sysctl.conf', pty=False) - sudo('sysctl -p') - sudo('ulimit -n 100000') + sudo("sysctl -p") + sudo("ulimit -n 100000") connections.connect(env.host_string) - + # run('touch /home/ubuntu/.bash_profile') # run('echo "ulimit -n $FILEMAX" >> /home/ubuntu/.bash_profile') @@ -736,67 +823,78 @@ def setup_ulimit(): # echo "net.ipv4.ip_local_port_range = 1024 65535" >> /etc/sysctl.conf # sudo chmod 644 /etc/sysctl.conf + def setup_do_monitoring(): - run('curl -sSL https://agent.digitalocean.com/install.sh | sh') - + run("curl -sSL https://agent.digitalocean.com/install.sh | sh") + + def setup_syncookies(): - sudo('echo 1 | sudo tee /proc/sys/net/ipv4/tcp_syncookies') - sudo('sudo /sbin/sysctl -w net.ipv4.tcp_syncookies=1') + sudo("echo 1 | sudo tee /proc/sys/net/ipv4/tcp_syncookies") + sudo("sudo /sbin/sysctl -w net.ipv4.tcp_syncookies=1") + def setup_sudoers(user=None): sudo('echo "%s ALL=(ALL) NOPASSWD: ALL" | sudo tee /etc/sudoers.d/sclay' % (user or env.user)) - sudo('chmod 0440 /etc/sudoers.d/sclay') + sudo("chmod 0440 /etc/sudoers.d/sclay") + def setup_nginx(): - NGINX_VERSION = '1.19.5' + NGINX_VERSION = "1.19.5" with cd(env.VENDOR_PATH), settings(warn_only=True): sudo("groupadd nginx") sudo("useradd -g nginx -d /var/www/htdocs -s /bin/false nginx") - run('wget http://nginx.org/download/nginx-%s.tar.gz' % NGINX_VERSION) - run('tar -xzf nginx-%s.tar.gz' % NGINX_VERSION) - run('rm nginx-%s.tar.gz' % NGINX_VERSION) - with cd('nginx-%s' 
% NGINX_VERSION): - run('./configure --with-http_ssl_module --with-http_stub_status_module --with-http_gzip_static_module --with-http_realip_module ') - run('make') - sudo('make install') + run("wget http://nginx.org/download/nginx-%s.tar.gz" % NGINX_VERSION) + run("tar -xzf nginx-%s.tar.gz" % NGINX_VERSION) + run("rm nginx-%s.tar.gz" % NGINX_VERSION) + with cd("nginx-%s" % NGINX_VERSION): + run( + "./configure --with-http_ssl_module --with-http_stub_status_module --with-http_gzip_static_module --with-http_realip_module " + ) + run("make") + sudo("make install") config_nginx() + def config_nginx(): put("config/nginx.conf", "/usr/local/nginx/conf/nginx.conf", use_sudo=True) sudo("mkdir -p /usr/local/nginx/conf/sites-enabled") sudo("mkdir -p /var/log/nginx") put("config/nginx.newsblur.conf", "/usr/local/nginx/conf/sites-enabled/newsblur.conf", use_sudo=True) put("config/nginx-init", "/etc/init.d/nginx", use_sudo=True) - sudo('sed -i -e s/nginx_none/`cat /etc/hostname`/g /usr/local/nginx/conf/sites-enabled/newsblur.conf') + sudo("sed -i -e s/nginx_none/`cat /etc/hostname`/g /usr/local/nginx/conf/sites-enabled/newsblur.conf") sudo("chmod 0755 /etc/init.d/nginx") sudo("/usr/sbin/update-rc.d -f nginx defaults") sudo("/etc/init.d/nginx restart") copy_certificates() + # =============== # = Setup - App = # =============== + def setup_app_firewall(): - sudo('ufw default deny') - sudo('ufw allow ssh') # ssh - sudo('ufw allow 80') # http - sudo('ufw allow 8000') # gunicorn - sudo('ufw allow 8888') # socket.io - sudo('ufw allow 8889') # socket.io ssl - sudo('ufw allow 443') # https - sudo('ufw --force enable') + sudo("ufw default deny") + sudo("ufw allow ssh") # ssh + sudo("ufw allow 80") # http + sudo("ufw allow 8000") # gunicorn + sudo("ufw allow 8888") # socket.io + sudo("ufw allow 8889") # socket.io ssl + sudo("ufw allow 443") # https + sudo("ufw --force enable") + def remove_gunicorn(): with cd(env.VENDOR_PATH): - sudo('rm -fr gunicorn') - + sudo("rm -fr gunicorn") + + def setup_gunicorn(supervisor=True, restart=True): if supervisor: - put('config/supervisor_gunicorn.conf', '/etc/supervisor/conf.d/gunicorn.conf', use_sudo=True) - sudo('supervisorctl reread') + put("config/supervisor_gunicorn.conf", "/etc/supervisor/conf.d/gunicorn.conf", use_sudo=True) + sudo("supervisorctl reread") if restart: - sudo('supervisorctl update') + sudo("supervisorctl update") # with cd(env.VENDOR_PATH): # sudo('rm -fr gunicorn') # run('git clone git://github.com/benoitc/gunicorn.git') @@ -806,265 +904,304 @@ def setup_gunicorn(supervisor=True, restart=True): def update_gunicorn(): - with cd(os.path.join(env.VENDOR_PATH, 'gunicorn')): - run('git pull') - sudo('python setup.py develop') + with cd(os.path.join(env.VENDOR_PATH, "gunicorn")): + run("git pull") + sudo("python setup.py develop") + def setup_staging(): - run('git clone https://github.com/samuelclay/NewsBlur.git staging') - with cd('~/staging'): - run('cp ../newsblur/local_settings.py local_settings.py') - run('mkdir -p logs') - run('touch logs/newsblur.log') + run("git clone https://github.com/samuelclay/NewsBlur.git staging") + with cd("~/staging"): + run("cp ../newsblur/local_settings.py local_settings.py") + run("mkdir -p logs") + run("touch logs/newsblur.log") + def setup_node_app(): - sudo('curl -sL https://deb.nodesource.com/setup_14.x | sudo bash -') - sudo('apt-get install -y nodejs') + sudo("curl -sL https://deb.nodesource.com/setup_14.x | sudo bash -") + sudo("apt-get install -y nodejs") # run('curl -L https://npmjs.org/install.sh | sudo sh') # 
sudo('apt-get install npm') - sudo('sudo npm install -g npm') - sudo('npm install -g supervisor') - sudo('ufw allow 8888') - sudo('ufw allow 4040') + sudo("sudo npm install -g npm") + sudo("npm install -g supervisor") + sudo("ufw allow 8888") + sudo("ufw allow 4040") + def config_node(full=False): - sudo('rm -f /etc/supervisor/conf.d/gunicorn.conf') - sudo('rm -f /etc/supervisor/conf.d/node.conf') - put('config/supervisor_node_unread.conf', '/etc/supervisor/conf.d/node_unread.conf', use_sudo=True) - put('config/supervisor_node_unread_ssl.conf', '/etc/supervisor/conf.d/node_unread_ssl.conf', use_sudo=True) - put('config/supervisor_node_favicons.conf', '/etc/supervisor/conf.d/node_favicons.conf', use_sudo=True) - put('config/supervisor_node_text.conf', '/etc/supervisor/conf.d/node_text.conf', use_sudo=True) - + sudo("rm -f /etc/supervisor/conf.d/gunicorn.conf") + sudo("rm -f /etc/supervisor/conf.d/node.conf") + put("config/supervisor_node_unread.conf", "/etc/supervisor/conf.d/node_unread.conf", use_sudo=True) + put( + "config/supervisor_node_unread_ssl.conf", "/etc/supervisor/conf.d/node_unread_ssl.conf", use_sudo=True + ) + put("config/supervisor_node_favicons.conf", "/etc/supervisor/conf.d/node_favicons.conf", use_sudo=True) + put("config/supervisor_node_text.conf", "/etc/supervisor/conf.d/node_text.conf", use_sudo=True) + if full: run("rm -fr /srv/newsblur/node/node_modules") with cd(os.path.join(env.NEWSBLUR_PATH, "node")): run("npm install") - - sudo('supervisorctl reload') + + sudo("supervisorctl reload") + @parallel def copy_app_settings(): - run('rm -f %s/local_settings.py' % env.NEWSBLUR_PATH) - put(os.path.join(env.SECRETS_PATH, 'settings/app_settings.py'), - '%s/newsblur/local_settings.py' % env.NEWSBLUR_PATH) + run("rm -f %s/local_settings.py" % env.NEWSBLUR_PATH) + put( + os.path.join(env.SECRETS_PATH, "settings/app_settings.py"), + "%s/newsblur/local_settings.py" % env.NEWSBLUR_PATH, + ) run('echo "\nSERVER_NAME = \\\\"`hostname`\\\\"" >> %s/newsblur/local_settings.py' % env.NEWSBLUR_PATH) + def assemble_certificates(): - with lcd(os.path.join(env.SECRETS_PATH, 'certificates/comodo')): - local('pwd') - local('cat STAR_newsblur_com.crt EssentialSSLCA_2.crt ComodoUTNSGCCA.crt UTNAddTrustSGCCA.crt AddTrustExternalCARoot.crt > newsblur.com.crt') - + with lcd(os.path.join(env.SECRETS_PATH, "certificates/comodo")): + local("pwd") + local( + "cat STAR_newsblur_com.crt EssentialSSLCA_2.crt ComodoUTNSGCCA.crt UTNAddTrustSGCCA.crt AddTrustExternalCARoot.crt > newsblur.com.crt" + ) + + def copy_certificates(copy=False): - cert_path = os.path.join(env.NEWSBLUR_PATH, 'config/certificates') - run('mkdir -p %s' % cert_path) + cert_path = os.path.join(env.NEWSBLUR_PATH, "config/certificates") + run("mkdir -p %s" % cert_path) fullchain_path = "/etc/letsencrypt/live/newsblur.com/fullchain.pem" privkey_path = "/etc/letsencrypt/live/newsblur.com/privkey.pem" if copy: - sudo('mkdir -p %s' % os.path.dirname(fullchain_path)) - put(os.path.join(env.SECRETS_PATH, 'certificates/newsblur.com.pem'), fullchain_path, use_sudo=True) - put(os.path.join(env.SECRETS_PATH, 'certificates/newsblur.com.key'), privkey_path, use_sudo=True) - - run('ln -fs %s %s' % (fullchain_path, os.path.join(cert_path, 'newsblur.com.crt'))) - run('ln -fs %s %s' % (fullchain_path, os.path.join(cert_path, 'newsblur.com.pem'))) # For backwards compatibility with hard-coded nginx configs - run('ln -fs %s %s' % (privkey_path, os.path.join(cert_path, 'newsblur.com.key'))) - run('ln -fs %s %s' % (privkey_path, os.path.join(cert_path, 
'newsblur.com.crt.key'))) # HAProxy - put(os.path.join(env.SECRETS_PATH, 'certificates/comodo/dhparams.pem'), cert_path) - put(os.path.join(env.SECRETS_PATH, 'certificates/ios/aps_development.pem'), cert_path) + sudo("mkdir -p %s" % os.path.dirname(fullchain_path)) + put(os.path.join(env.SECRETS_PATH, "certificates/newsblur.com.pem"), fullchain_path, use_sudo=True) + put(os.path.join(env.SECRETS_PATH, "certificates/newsblur.com.key"), privkey_path, use_sudo=True) + + run("ln -fs %s %s" % (fullchain_path, os.path.join(cert_path, "newsblur.com.crt"))) + run( + "ln -fs %s %s" % (fullchain_path, os.path.join(cert_path, "newsblur.com.pem")) + ) # For backwards compatibility with hard-coded nginx configs + run("ln -fs %s %s" % (privkey_path, os.path.join(cert_path, "newsblur.com.key"))) + run("ln -fs %s %s" % (privkey_path, os.path.join(cert_path, "newsblur.com.crt.key"))) # HAProxy + put(os.path.join(env.SECRETS_PATH, "certificates/comodo/dhparams.pem"), cert_path) + put(os.path.join(env.SECRETS_PATH, "certificates/ios/aps_development.pem"), cert_path) # Export aps.cer from Apple issued certificate using Keychain Assistant # openssl x509 -in aps.cer -inform DER -outform PEM -out aps.pem - put(os.path.join(env.SECRETS_PATH, 'certificates/ios/aps.pem'), cert_path) + put(os.path.join(env.SECRETS_PATH, "certificates/ios/aps.pem"), cert_path) # Export aps.p12 from aps.cer using Keychain Assistant # openssl pkcs12 -in aps.p12 -out aps.p12.pem -nodes - put(os.path.join(env.SECRETS_PATH, 'certificates/ios/aps.p12.pem'), cert_path) - + put(os.path.join(env.SECRETS_PATH, "certificates/ios/aps.p12.pem"), cert_path) + + def setup_certbot(): - sudo('snap install --classic certbot') - sudo('snap set certbot trust-plugin-with-root=ok') - sudo('snap install certbot-dns-dnsimple') - sudo('ln -fs /snap/bin/certbot /usr/bin/certbot') - put(os.path.join(env.SECRETS_PATH, 'configs/certbot.conf'), - os.path.join(env.NEWSBLUR_PATH, 'certbot.conf')) - sudo('chmod 0600 %s' % os.path.join(env.NEWSBLUR_PATH, 'certbot.conf')) - sudo('certbot certonly -n --agree-tos ' - ' --dns-dnsimple --dns-dnsimple-credentials %s' - ' --email samuel@newsblur.com --domains newsblur.com ' - ' -d "*.newsblur.com" -d "popular.global.newsblur.com"' % - (os.path.join(env.NEWSBLUR_PATH, 'certbot.conf'))) - sudo('chmod 0755 /etc/letsencrypt/{live,archive}') - sudo('chmod 0755 /etc/letsencrypt/archive/newsblur.com/privkey1.pem') - + sudo("snap install --classic certbot") + sudo("snap set certbot trust-plugin-with-root=ok") + sudo("snap install certbot-dns-dnsimple") + sudo("ln -fs /snap/bin/certbot /usr/bin/certbot") + put( + os.path.join(env.SECRETS_PATH, "configs/certbot.conf"), + os.path.join(env.NEWSBLUR_PATH, "certbot.conf"), + ) + sudo("chmod 0600 %s" % os.path.join(env.NEWSBLUR_PATH, "certbot.conf")) + sudo( + "certbot certonly -n --agree-tos " + " --dns-dnsimple --dns-dnsimple-credentials %s" + " --email samuel@newsblur.com --domains newsblur.com " + ' -d "*.newsblur.com" -d "popular.global.newsblur.com"' + % (os.path.join(env.NEWSBLUR_PATH, "certbot.conf")) + ) + sudo("chmod 0755 /etc/letsencrypt/{live,archive}") + sudo("chmod 0755 /etc/letsencrypt/archive/newsblur.com/privkey1.pem") + + # def setup_certbot_old(): # sudo('add-apt-repository -y universe') # sudo('add-apt-repository -y ppa:certbot/certbot') # sudo('apt-get update') # sudo('apt-get install -y certbot') # sudo('apt-get install -y python3-certbot-dns-dnsimple') -# put(os.path.join(env.SECRETS_PATH, 'configs/certbot.conf'), +# put(os.path.join(env.SECRETS_PATH, 
'configs/certbot.conf'), # os.path.join(env.NEWSBLUR_PATH, 'certbot.conf')) # sudo('chmod 0600 %s' % os.path.join(env.NEWSBLUR_PATH, 'certbot.conf')) # sudo('certbot certonly -n --agree-tos ' # ' --dns-dnsimple --dns-dnsimple-credentials %s' # ' --email samuel@newsblur.com --domains newsblur.com ' -# ' -d "*.newsblur.com" -d "global.popular.newsblur.com"' % +# ' -d "*.newsblur.com" -d "global.popular.newsblur.com"' % # (os.path.join(env.NEWSBLUR_PATH, 'certbot.conf'))) # sudo('chmod 0755 /etc/letsencrypt/{live,archive}') # sudo('chmod 0755 /etc/letsencrypt/archive/newsblur.com/privkey1.pem') - + + @parallel def maintenance_on(): role = role_for_host() - if role in ['work', 'search']: - sudo('supervisorctl stop all') + if role in ["work", "search"]: + sudo("supervisorctl stop all") else: - put('templates/maintenance_off.html', '%s/templates/maintenance_off.html' % env.NEWSBLUR_PATH) + put("templates/maintenance_off.html", "%s/templates/maintenance_off.html" % env.NEWSBLUR_PATH) with virtualenv(): - run('mv templates/maintenance_off.html templates/maintenance_on.html') + run("mv templates/maintenance_off.html templates/maintenance_on.html") + @parallel def maintenance_off(): role = role_for_host() - if role in ['work', 'search']: - sudo('supervisorctl start all') + if role in ["work", "search"]: + sudo("supervisorctl start all") else: with virtualenv(): - run('mv templates/maintenance_on.html templates/maintenance_off.html') - run('git checkout templates/maintenance_off.html') + run("mv templates/maintenance_on.html templates/maintenance_off.html") + run("git checkout templates/maintenance_off.html") + def setup_haproxy(debug=False): version = "2.3.3" - sudo('ufw allow 81') # nginx moved - sudo('ufw allow 1936') # haproxy stats + sudo("ufw allow 81") # nginx moved + sudo("ufw allow 1936") # haproxy stats # sudo('apt-get install -y haproxy') # sudo('apt-get remove -y haproxy') with cd(env.VENDOR_PATH): - run('wget http://www.haproxy.org/download/2.3/src/haproxy-%s.tar.gz' % version) - run('tar -xf haproxy-%s.tar.gz' % version) - with cd('haproxy-%s' % version): - run('make TARGET=linux-glibc USE_PCRE=1 USE_OPENSSL=1 USE_ZLIB=1') - sudo('make install') - put('config/haproxy-init', '/etc/init.d/haproxy', use_sudo=True) - sudo('chmod u+x /etc/init.d/haproxy') - sudo('mkdir -p /etc/haproxy') + run("wget http://www.haproxy.org/download/2.3/src/haproxy-%s.tar.gz" % version) + run("tar -xf haproxy-%s.tar.gz" % version) + with cd("haproxy-%s" % version): + run("make TARGET=linux-glibc USE_PCRE=1 USE_OPENSSL=1 USE_ZLIB=1") + sudo("make install") + put("config/haproxy-init", "/etc/init.d/haproxy", use_sudo=True) + sudo("chmod u+x /etc/init.d/haproxy") + sudo("mkdir -p /etc/haproxy") if debug: - put('config/debug_haproxy.conf', '/etc/haproxy/haproxy.cfg', use_sudo=True) + put("config/debug_haproxy.conf", "/etc/haproxy/haproxy.cfg", use_sudo=True) else: build_haproxy() - put(os.path.join(env.SECRETS_PATH, 'configs/haproxy.conf'), - '/etc/haproxy/haproxy.cfg', use_sudo=True) + put(os.path.join(env.SECRETS_PATH, "configs/haproxy.conf"), "/etc/haproxy/haproxy.cfg", use_sudo=True) sudo('echo "ENABLED=1" | sudo tee /etc/default/haproxy') cert_path = "%s/config/certificates" % env.NEWSBLUR_PATH - run('cat %s/newsblur.com.crt > %s/newsblur.pem' % (cert_path, cert_path)) - run('cat %s/newsblur.com.key >> %s/newsblur.pem' % (cert_path, cert_path)) - run('ln -s %s/newsblur.com.key %s/newsblur.pem.key' % (cert_path, cert_path)) - put('config/haproxy_rsyslog.conf', '/etc/rsyslog.d/49-haproxy.conf', use_sudo=True) + 
run("cat %s/newsblur.com.crt > %s/newsblur.pem" % (cert_path, cert_path)) + run("cat %s/newsblur.com.key >> %s/newsblur.pem" % (cert_path, cert_path)) + run("ln -s %s/newsblur.com.key %s/newsblur.pem.key" % (cert_path, cert_path)) + put("config/haproxy_rsyslog.conf", "/etc/rsyslog.d/49-haproxy.conf", use_sudo=True) # sudo('restart rsyslog') - sudo('update-rc.d -f haproxy defaults') + sudo("update-rc.d -f haproxy defaults") + + sudo("/etc/init.d/haproxy stop") + run("sleep 5") + sudo("/etc/init.d/haproxy start") - sudo('/etc/init.d/haproxy stop') - run('sleep 5') - sudo('/etc/init.d/haproxy start') def config_haproxy(debug=False): if debug: - put('config/debug_haproxy.conf', '/etc/haproxy/haproxy.cfg', use_sudo=True) + put("config/debug_haproxy.conf", "/etc/haproxy/haproxy.cfg", use_sudo=True) else: build_haproxy() - put(os.path.join(env.SECRETS_PATH, 'configs/haproxy.conf'), - '/etc/haproxy/haproxy.cfg', use_sudo=True) + put(os.path.join(env.SECRETS_PATH, "configs/haproxy.conf"), "/etc/haproxy/haproxy.cfg", use_sudo=True) - haproxy_check = run('haproxy -c -f /etc/haproxy/haproxy.cfg') + haproxy_check = run("haproxy -c -f /etc/haproxy/haproxy.cfg") if haproxy_check.return_code == 0: - sudo('/etc/init.d/haproxy reload') + sudo("/etc/init.d/haproxy reload") else: print(" !!!> Uh-oh, HAProxy config doesn't check out: %s" % haproxy_check.return_code) + def build_haproxy(): droplets = assign_digitalocean_roledefs(split=True) servers = defaultdict(list) - gunicorn_counts_servers = ['app22', 'app26'] - gunicorn_refresh_servers = ['app20', 'app21'] - maintenance_servers = ['app20'] - node_socket3_servers = ['node02', 'node03'] + gunicorn_counts_servers = ["app22", "app26"] + gunicorn_refresh_servers = ["app20", "app21"] + maintenance_servers = ["app20"] + node_socket3_servers = ["node02", "node03"] ignore_servers = [] - - for group_type in ['app', 'push', 'work', 'node_socket', 'node_socket3', 'node_favicon', 'node_text', 'www']: + + for group_type in [ + "app", + "push", + "work", + "node_socket", + "node_socket3", + "node_favicon", + "node_text", + "www", + ]: group_type_name = group_type - if 'node' in group_type: - group_type_name = 'node' + if "node" in group_type: + group_type_name = "node" for server in droplets[group_type_name]: - droplet_nums = re.findall(r'\d+', server['name']) - droplet_num = droplet_nums[0] if droplet_nums else '' + droplet_nums = re.findall(r"\d+", server["name"]) + droplet_num = droplet_nums[0] if droplet_nums else "" server_type = group_type port = 80 check_inter = 3000 - - if server['name'] in ignore_servers: - print(" ---> Ignoring %s" % server['name']) + + if server["name"] in ignore_servers: + print(" ---> Ignoring %s" % server["name"]) continue - if server['name'] in node_socket3_servers and group_type != 'node_socket3': + if server["name"] in node_socket3_servers and group_type != "node_socket3": continue - if server['name'] not in node_socket3_servers and group_type == 'node_socket3': + if server["name"] not in node_socket3_servers and group_type == "node_socket3": continue - if server_type == 'www': + if server_type == "www": port = 81 - if group_type == 'node_socket': + if group_type == "node_socket": port = 8888 - if group_type == 'node_socket3': + if group_type == "node_socket3": port = 8888 - if group_type == 'node_text': + if group_type == "node_text": port = 4040 - if group_type in ['app', 'push']: + if group_type in ["app", "push"]: port = 8000 - address = "%s:%s" % (server['address'], port) - - if server_type == 'app': - nginx_address = "%s:80" % 
(server['address']) - servers['nginx'].append(" server nginx%-15s %-22s check inter 3000ms" % (droplet_num, nginx_address)) - if server['name'] in maintenance_servers: - nginx_address = "%s:80" % (server['address']) - servers['maintenance'].append(" server nginx%-15s %-22s check inter 3000ms" % (droplet_num, nginx_address)) - - if server['name'] in gunicorn_counts_servers: - server_type = 'gunicorn_counts' + address = "%s:%s" % (server["address"], port) + + if server_type == "app": + nginx_address = "%s:80" % (server["address"]) + servers["nginx"].append( + " server nginx%-15s %-22s check inter 3000ms" % (droplet_num, nginx_address) + ) + if server["name"] in maintenance_servers: + nginx_address = "%s:80" % (server["address"]) + servers["maintenance"].append( + " server nginx%-15s %-22s check inter 3000ms" % (droplet_num, nginx_address) + ) + + if server["name"] in gunicorn_counts_servers: + server_type = "gunicorn_counts" check_inter = 15000 - elif server['name'] in gunicorn_refresh_servers: - server_type = 'gunicorn_refresh' + elif server["name"] in gunicorn_refresh_servers: + server_type = "gunicorn_refresh" check_inter = 30000 - + server_name = "%s%s" % (server_type, droplet_num) - servers[server_type].append(" server %-20s %-22s check inter %sms" % (server_name, address, check_inter)) - - h = open(os.path.join(env.NEWSBLUR_PATH, 'config/haproxy.conf.template'), 'r') + servers[server_type].append( + " server %-20s %-22s check inter %sms" % (server_name, address, check_inter) + ) + + h = open(os.path.join(env.NEWSBLUR_PATH, "config/haproxy.conf.template"), "r") haproxy_template = h.read() for sub, server_list in list(servers.items()): - sorted_servers = '\n'.join(sorted(server_list)) + sorted_servers = "\n".join(sorted(server_list)) haproxy_template = haproxy_template.replace("{{ %s }}" % sub, sorted_servers) - f = open(os.path.join(env.SECRETS_PATH, 'configs/haproxy.conf'), 'w') + f = open(os.path.join(env.SECRETS_PATH, "configs/haproxy.conf"), "w") f.write(haproxy_template) f.close() + def upgrade_django(role=None): if not role: role = role_for_host() with virtualenv(), settings(warn_only=True): - sudo('sudo dpkg --configure -a') + sudo("sudo dpkg --configure -a") setup_supervisor() pull() - run('git co django1.11') + run("git co django1.11") if role == "task": - sudo('supervisorctl stop celery') - run('./utils/kill_celery.sh') + sudo("supervisorctl stop celery") + run("./utils/kill_celery.sh") copy_task_settings() enable_celery_supervisor(update=False) elif role == "work": copy_app_settings() enable_celerybeat() elif role == "web" or role == "app": - sudo('supervisorctl stop gunicorn') - run('./utils/kill_gunicorn.sh') + sudo("supervisorctl stop gunicorn") + run("./utils/kill_gunicorn.sh") copy_app_settings() setup_gunicorn(restart=False) elif role == "node": @@ -1078,90 +1215,97 @@ def upgrade_django(role=None): # sudo('reboot') + def clean(): with virtualenv(), settings(warn_only=True): run('find . 
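# --- Editorial example (not part of the patch): the "{{ name }}" template
# substitution at the end of build_haproxy() above, run against fabricated
# template text and a single fabricated server line.
template = "backend app\n{{ app }}\n"
servers = {"app": ["    server app22                10.0.0.2:8000          check inter 3000ms"]}
for sub, server_list in servers.items():
    template = template.replace("{{ %s }}" % sub, "\n".join(sorted(server_list)))
print(template)
# backend app
#     server app22                10.0.0.2:8000          check inter 3000ms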
-name "*.pyc" -exec rm -f {} \;') - + + def downgrade_django(role=None): with virtualenv(), settings(warn_only=True): pull() - run('git co master') + run("git co master") pip() - run('pip uninstall -y django-paypal') + run("pip uninstall -y django-paypal") if role == "task": copy_task_settings() enable_celery_supervisor() else: copy_app_settings() deploy() - + + def vendorize_paypal(): with virtualenv(), settings(warn_only=True): - run('pip uninstall -y django-paypal') + run("pip uninstall -y django-paypal") + def upgrade_pil(): with virtualenv(): pull() - run('pip install --upgrade pillow') + run("pip install --upgrade pillow") # celery_stop() - sudo('apt-get remove -y python-imaging') - sudo('supervisorctl reload') + sudo("apt-get remove -y python-imaging") + sudo("supervisorctl reload") # kill() + def downgrade_pil(): with virtualenv(): - sudo('apt-get install -y python-imaging') - sudo('rm -fr /usr/local/lib/python2.7/dist-packages/Pillow*') + sudo("apt-get install -y python-imaging") + sudo("rm -fr /usr/local/lib/python2.7/dist-packages/Pillow*") pull() - sudo('supervisorctl reload') + sudo("supervisorctl reload") # kill() + def setup_db_monitor(): pull() with virtualenv(): - sudo('apt-get install -y libpq-dev python2.7-dev') - run('pip install -r flask/requirements.txt') - put('flask/supervisor_db_monitor.conf', '/etc/supervisor/conf.d/db_monitor.conf', use_sudo=True) - sudo('supervisorctl reread') - sudo('supervisorctl update') - + sudo("apt-get install -y libpq-dev python2.7-dev") + run("pip install -r flask/requirements.txt") + put("flask/supervisor_db_monitor.conf", "/etc/supervisor/conf.d/db_monitor.conf", use_sudo=True) + sudo("supervisorctl reread") + sudo("supervisorctl update") + + # ============== # = Setup - DB = # ============== + @parallel def setup_db_firewall(): ports = [ - 5432, # PostgreSQL + 5432, # PostgreSQL 27017, # MongoDB 28017, # MongoDB web 27019, # MongoDB config - 6379, # Redis + 6379, # Redis # 11211, # Memcached - 3060, # Node original page server - 9200, # Elasticsearch - 5000, # DB Monitor + 3060, # Node original page server + 9200, # Elasticsearch + 5000, # DB Monitor ] - sudo('ufw --force reset') - sudo('ufw default deny') - sudo('ufw allow ssh') - sudo('ufw allow 80') - sudo('ufw allow 443') + sudo("ufw --force reset") + sudo("ufw default deny") + sudo("ufw allow ssh") + sudo("ufw allow 80") + sudo("ufw allow 443") # DigitalOcean - for ip in set(env.roledefs['app'] + - env.roledefs['db'] + - env.roledefs['debug'] + - env.roledefs['task'] + - env.roledefs['work'] + - env.roledefs['push'] + - env.roledefs['www'] + - env.roledefs['search'] + - env.roledefs['node']): - sudo('ufw allow proto tcp from %s to any port %s' % ( - ip, - ','.join(map(str, ports)) - )) + for ip in set( + env.roledefs["app"] + + env.roledefs["db"] + + env.roledefs["debug"] + + env.roledefs["task"] + + env.roledefs["work"] + + env.roledefs["push"] + + env.roledefs["www"] + + env.roledefs["search"] + + env.roledefs["node"] + ): + sudo("ufw allow proto tcp from %s to any port %s" % (ip, ",".join(map(str, ports)))) # EC2 # for host in set(env.roledefs['ec2task']): @@ -1171,67 +1315,77 @@ def setup_db_firewall(): # ','.join(map(str, ports)) # )) - sudo('ufw --force enable') + sudo("ufw --force enable") + def setup_rabbitmq(): sudo('echo "deb http://www.rabbitmq.com/debian/ testing main" | sudo tee -a /etc/apt/sources.list') - run('wget http://www.rabbitmq.com/rabbitmq-signing-key-public.asc') - sudo('apt-key add rabbitmq-signing-key-public.asc') - run('rm 
rabbitmq-signing-key-public.asc') - sudo('apt-get update') - sudo('apt-get install -y rabbitmq-server') - sudo('rabbitmqctl add_user newsblur newsblur') - sudo('rabbitmqctl add_vhost newsblurvhost') + run("wget http://www.rabbitmq.com/rabbitmq-signing-key-public.asc") + sudo("apt-key add rabbitmq-signing-key-public.asc") + run("rm rabbitmq-signing-key-public.asc") + sudo("apt-get update") + sudo("apt-get install -y rabbitmq-server") + sudo("rabbitmqctl add_user newsblur newsblur") + sudo("rabbitmqctl add_vhost newsblurvhost") sudo('rabbitmqctl set_permissions -p newsblurvhost newsblur ".*" ".*" ".*"') + # def setup_memcached(): # sudo('apt-get -y install memcached') + def setup_postgres(standby=False): shmmax = 17818362112 hugepages = 9000 - sudo('echo "deb http://apt.postgresql.org/pub/repos/apt/ `lsb_release -cs`-pgdg main" |sudo tee /etc/apt/sources.list.d/pgdg.list') - sudo('wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -') - sudo('apt update') - sudo('apt install -y postgresql-13') - put('config/postgresql-13.conf', '/etc/postgresql/13/main/postgresql.conf', use_sudo=True) - put('config/postgres_hba-13.conf', '/etc/postgresql/13/main/pg_hba.conf', use_sudo=True) - sudo('mkdir -p /var/lib/postgresql/13/archive') - sudo('chown -R postgres.postgres /etc/postgresql/13/main') - sudo('chown -R postgres.postgres /var/lib/postgresql/13/main') - sudo('chown -R postgres.postgres /var/lib/postgresql/13/archive') + sudo( + 'echo "deb http://apt.postgresql.org/pub/repos/apt/ `lsb_release -cs`-pgdg main" |sudo tee /etc/apt/sources.list.d/pgdg.list' + ) + sudo("wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | sudo apt-key add -") + sudo("apt update") + sudo("apt install -y postgresql-13") + put("config/postgresql-13.conf", "/etc/postgresql/13/main/postgresql.conf", use_sudo=True) + put("config/postgres_hba-13.conf", "/etc/postgresql/13/main/pg_hba.conf", use_sudo=True) + sudo("mkdir -p /var/lib/postgresql/13/archive") + sudo("chown -R postgres.postgres /etc/postgresql/13/main") + sudo("chown -R postgres.postgres /var/lib/postgresql/13/main") + sudo("chown -R postgres.postgres /var/lib/postgresql/13/archive") sudo('echo "%s" | sudo tee /proc/sys/kernel/shmmax' % shmmax) sudo('echo "\nkernel.shmmax = %s" | sudo tee -a /etc/sysctl.conf' % shmmax) sudo('echo "\nvm.nr_hugepages = %s\n" | sudo tee -a /etc/sysctl.conf' % hugepages) run('echo "ulimit -n 100000" > postgresql.defaults') - sudo('mv postgresql.defaults /etc/default/postgresql') - sudo('sysctl -p') - sudo('rm -f /lib/systemd/system/postgresql.service') # Ubuntu 16 has wrong default - sudo('systemctl daemon-reload') - sudo('systemctl enable postgresql') + sudo("mv postgresql.defaults /etc/default/postgresql") + sudo("sysctl -p") + sudo("rm -f /lib/systemd/system/postgresql.service") # Ubuntu 16 has wrong default + sudo("systemctl daemon-reload") + sudo("systemctl enable postgresql") if standby: - put('config/postgresql_recovery.conf', '/var/lib/postgresql/13/recovery.conf', use_sudo=True) - sudo('chown -R postgres.postgres /var/lib/postgresql/13/recovery.conf') + put("config/postgresql_recovery.conf", "/var/lib/postgresql/13/recovery.conf", use_sudo=True) + sudo("chown -R postgres.postgres /var/lib/postgresql/13/recovery.conf") + + sudo("/etc/init.d/postgresql stop") + sudo("/etc/init.d/postgresql start") - sudo('/etc/init.d/postgresql stop') - sudo('/etc/init.d/postgresql start') def config_postgres(standby=False): - put('config/postgresql-13.conf', 
'/etc/postgresql/13/main/postgresql.conf', use_sudo=True) - put('config/postgres_hba.conf', '/etc/postgresql/13/main/pg_hba.conf', use_sudo=True) - sudo('chown postgres.postgres /etc/postgresql/13/main/postgresql.conf') + put("config/postgresql-13.conf", "/etc/postgresql/13/main/postgresql.conf", use_sudo=True) + put("config/postgres_hba.conf", "/etc/postgresql/13/main/pg_hba.conf", use_sudo=True) + sudo("chown postgres.postgres /etc/postgresql/13/main/postgresql.conf") run('echo "ulimit -n 100000" > postgresql.defaults') - sudo('mv postgresql.defaults /etc/default/postgresql') - - sudo('/etc/init.d/postgresql reload 13') + sudo("mv postgresql.defaults /etc/default/postgresql") + + sudo("/etc/init.d/postgresql reload 13") + def upgrade_postgres(): - sudo('su postgres -c "/usr/lib/postgresql/10/bin/pg_upgrade -b /usr/lib/postgresql/9.4/bin -B /usr/lib/postgresql/10/bin -d /var/lib/postgresql/9.4/main -D /var/lib/postgresql/10/main"') - -def copy_postgres_to_standby(master='db01'): + sudo( + 'su postgres -c "/usr/lib/postgresql/10/bin/pg_upgrade -b /usr/lib/postgresql/9.4/bin -B /usr/lib/postgresql/10/bin -d /var/lib/postgresql/9.4/main -D /var/lib/postgresql/10/main"' + ) + + +def copy_postgres_to_standby(master="db01"): # http://www.rassoc.com/gregr/weblog/2013/02/16/zero-to-postgresql-streaming-replication-in-10-mins/ - + # Make sure you can ssh from master to slave and back with the postgres user account. # Need to give postgres accounts keys in authroized_keys. @@ -1240,259 +1394,319 @@ def copy_postgres_to_standby(master='db01'): # new: sudo su postgres; ssh db_pgsql # old: sudo su postgres; ssh new # old: sudo su postgres -c "psql -c \"SELECT pg_start_backup('label', true)\"" - sudo('systemctl stop postgresql') - sudo('mkdir -p /var/lib/postgresql/9.4/archive') - sudo('chown postgres.postgres /var/lib/postgresql/9.4/archive') + sudo("systemctl stop postgresql") + sudo("mkdir -p /var/lib/postgresql/9.4/archive") + sudo("chown postgres.postgres /var/lib/postgresql/9.4/archive") with settings(warn_only=True): - sudo('su postgres -c "rsync -Pav -e \'ssh -i ~postgres/.ssh/newsblur.key\' --stats --progress postgres@%s:/var/lib/postgresql/9.4/main /var/lib/postgresql/9.4/ --exclude postmaster.pid"' % master) - put('config/postgresql_recovery.conf', '/var/lib/postgresql/9.4/main/recovery.conf', use_sudo=True) - sudo('systemctl start postgresql') + sudo( + "su postgres -c \"rsync -Pav -e 'ssh -i ~postgres/.ssh/newsblur.key' --stats --progress postgres@%s:/var/lib/postgresql/9.4/main /var/lib/postgresql/9.4/ --exclude postmaster.pid\"" + % master + ) + put("config/postgresql_recovery.conf", "/var/lib/postgresql/9.4/main/recovery.conf", use_sudo=True) + sudo("systemctl start postgresql") # old: sudo su postgres -c "psql -c \"SELECT pg_stop_backup()\"" - + # Don't forget to add 'setup_postgres_backups' to new - + def disable_thp(): - put('config/disable_transparent_hugepages.sh', '/etc/init.d/disable-transparent-hugepages', use_sudo=True) - sudo('chmod 755 /etc/init.d/disable-transparent-hugepages') - sudo('update-rc.d disable-transparent-hugepages defaults') - + put("config/disable_transparent_hugepages.sh", "/etc/init.d/disable-transparent-hugepages", use_sudo=True) + sudo("chmod 755 /etc/init.d/disable-transparent-hugepages") + sudo("update-rc.d disable-transparent-hugepages defaults") + + def setup_mongo(): MONGODB_VERSION = "3.4.24" pull() disable_thp() - sudo('systemctl enable rc-local.service') # Enable rc.local - sudo('echo "#!/bin/sh -e\n\nif test -f 
/sys/kernel/mm/transparent_hugepage/enabled; then\n\ + sudo("systemctl enable rc-local.service") # Enable rc.local + sudo( + 'echo "#!/bin/sh -e\n\nif test -f /sys/kernel/mm/transparent_hugepage/enabled; then\n\ echo never > /sys/kernel/mm/transparent_hugepage/enabled\n\ fi\n\ if test -f /sys/kernel/mm/transparent_hugepage/defrag; then\n\ echo never > /sys/kernel/mm/transparent_hugepage/defrag\n\ fi\n\n\ - exit 0" | sudo tee /etc/rc.local') - sudo('curl -fsSL https://www.mongodb.org/static/pgp/server-3.4.asc | sudo apt-key add -') + exit 0" | sudo tee /etc/rc.local' + ) + sudo("curl -fsSL https://www.mongodb.org/static/pgp/server-3.4.asc | sudo apt-key add -") # sudo('echo "deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen" | sudo tee /etc/apt/sources.list.d/mongodb.list') # sudo('echo "\ndeb http://downloads-distro.mongodb.org/repo/debian-sysvinit dist 10gen" | sudo tee -a /etc/apt/sources.list') # sudo('echo "deb http://repo.mongodb.org/apt/ubuntu trusty/mongodb-org/3.2 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.2.list') - sudo('echo "deb http://repo.mongodb.org/apt/ubuntu xenial/mongodb-org/3.4 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.4.list') - sudo('apt-get update') - sudo('apt-get install -y mongodb-org=%s mongodb-org-server=%s mongodb-org-shell=%s mongodb-org-mongos=%s mongodb-org-tools=%s' % - (MONGODB_VERSION, MONGODB_VERSION, MONGODB_VERSION, MONGODB_VERSION, MONGODB_VERSION)) - put('config/mongodb.%s.conf' % ('prod' if env.user != 'ubuntu' else 'ec2'), - '/etc/mongodb.conf', use_sudo=True) - put('config/mongodb.service', '/etc/systemd/system/mongodb.service', use_sudo=True) + sudo( + 'echo "deb http://repo.mongodb.org/apt/ubuntu xenial/mongodb-org/3.4 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.4.list' + ) + sudo("apt-get update") + sudo( + "apt-get install -y mongodb-org=%s mongodb-org-server=%s mongodb-org-shell=%s mongodb-org-mongos=%s mongodb-org-tools=%s" + % (MONGODB_VERSION, MONGODB_VERSION, MONGODB_VERSION, MONGODB_VERSION, MONGODB_VERSION) + ) + put( + "config/mongodb.%s.conf" % ("prod" if env.user != "ubuntu" else "ec2"), + "/etc/mongodb.conf", + use_sudo=True, + ) + put("config/mongodb.service", "/etc/systemd/system/mongodb.service", use_sudo=True) run('echo "ulimit -n 100000" > mongodb.defaults') - sudo('mv mongodb.defaults /etc/default/mongod') - sudo('mkdir -p /var/log/mongodb') - sudo('chown mongodb /var/log/mongodb') - put('config/logrotate.mongo.conf', '/etc/logrotate.d/mongod', use_sudo=True) - sudo('systemctl enable mongodb') - + sudo("mv mongodb.defaults /etc/default/mongod") + sudo("mkdir -p /var/log/mongodb") + sudo("chown mongodb /var/log/mongodb") + put("config/logrotate.mongo.conf", "/etc/logrotate.d/mongod", use_sudo=True) + sudo("systemctl enable mongodb") + # Reclaim 5% disk space used for root logs. Set to 1%. 
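# --- Editor's sketch (illustrative, not part of the original patch): setup_mongo above
# disables transparent hugepages via rc.local and, just below, trims the ext4 root
# reservation with tune2fs. A minimal way to verify both settings on the target host,
# assuming the same /dev/vda1 root device this fabfile uses; the helper name is hypothetical.
import subprocess

def verify_mongo_host_tuning(device="/dev/vda1"):
    # After the rc.local script above runs at boot, both THP knobs should read "[never]".
    for knob in ("enabled", "defrag"):
        with open("/sys/kernel/mm/transparent_hugepage/%s" % knob) as f:
            assert "[never]" in f.read(), "THP %s still enabled" % knob
    # tune2fs -m 1 lowers the root-reserved blocks from the ext4 default of 5% to 1%;
    # tune2fs -l reports the resulting counts so the reservation can be confirmed.
    out = subprocess.check_output(["tune2fs", "-l", device]).decode()
    for line in out.splitlines():
        if line.startswith(("Block count", "Reserved block count")):
            print(line)
# The tune2fs call below applies the 1% reservation described in the comment above.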
with settings(warn_only=True): - sudo('tune2fs -m 1 /dev/vda1') + sudo("tune2fs -m 1 /dev/vda1") + def setup_mongo_configsvr(): - sudo('mkdir -p /var/lib/mongodb_configsvr') - sudo('chown mongodb.mongodb /var/lib/mongodb_configsvr') - put('config/mongodb.configsvr.conf', '/etc/mongodb.configsvr.conf', use_sudo=True) - put('config/mongodb.configsvr-init', '/etc/init.d/mongodb-configsvr', use_sudo=True) - sudo('chmod u+x /etc/init.d/mongodb-configsvr') + sudo("mkdir -p /var/lib/mongodb_configsvr") + sudo("chown mongodb.mongodb /var/lib/mongodb_configsvr") + put("config/mongodb.configsvr.conf", "/etc/mongodb.configsvr.conf", use_sudo=True) + put("config/mongodb.configsvr-init", "/etc/init.d/mongodb-configsvr", use_sudo=True) + sudo("chmod u+x /etc/init.d/mongodb-configsvr") run('echo "ulimit -n 100000" > mongodb_configsvr.defaults') - sudo('mv mongodb_configsvr.defaults /etc/default/mongodb_configsvr') - sudo('update-rc.d -f mongodb-configsvr defaults') - sudo('/etc/init.d/mongodb-configsvr start') + sudo("mv mongodb_configsvr.defaults /etc/default/mongodb_configsvr") + sudo("update-rc.d -f mongodb-configsvr defaults") + sudo("/etc/init.d/mongodb-configsvr start") + def setup_mongo_mongos(): - put('config/mongodb.mongos.conf', '/etc/mongodb.mongos.conf', use_sudo=True) - put('config/mongodb.mongos-init', '/etc/init.d/mongodb-mongos', use_sudo=True) - sudo('chmod u+x /etc/init.d/mongodb-mongos') + put("config/mongodb.mongos.conf", "/etc/mongodb.mongos.conf", use_sudo=True) + put("config/mongodb.mongos-init", "/etc/init.d/mongodb-mongos", use_sudo=True) + sudo("chmod u+x /etc/init.d/mongodb-mongos") run('echo "ulimit -n 100000" > mongodb_mongos.defaults') - sudo('mv mongodb_mongos.defaults /etc/default/mongodb_mongos') - sudo('update-rc.d -f mongodb-mongos defaults') - sudo('/etc/init.d/mongodb-mongos restart') + sudo("mv mongodb_mongos.defaults /etc/default/mongodb_mongos") + sudo("update-rc.d -f mongodb-mongos defaults") + sudo("/etc/init.d/mongodb-mongos restart") + def setup_mongo_mms(): pull() - sudo('rm -f /etc/supervisor/conf.d/mongomms.conf') - sudo('supervisorctl reread') - sudo('supervisorctl update') + sudo("rm -f /etc/supervisor/conf.d/mongomms.conf") + sudo("supervisorctl reread") + sudo("supervisorctl update") with cd(env.VENDOR_PATH): - sudo('apt-get remove -y mongodb-mms-monitoring-agent') - run('curl -OL https://mms.mongodb.com/download/agent/monitoring/mongodb-mms-monitoring-agent_2.2.0.70-1_amd64.deb') - sudo('dpkg -i mongodb-mms-monitoring-agent_2.2.0.70-1_amd64.deb') - run('rm mongodb-mms-monitoring-agent_2.2.0.70-1_amd64.deb') - put(os.path.join(env.SECRETS_PATH, 'settings/mongo_mms_config.txt'), - 'mongo_mms_config.txt') - sudo("echo \"\n\" | sudo tee -a /etc/mongodb-mms/monitoring-agent.config") - sudo('cat mongo_mms_config.txt | sudo tee -a /etc/mongodb-mms/monitoring-agent.config') - sudo('start mongodb-mms-monitoring-agent') + sudo("apt-get remove -y mongodb-mms-monitoring-agent") + run( + "curl -OL https://mms.mongodb.com/download/agent/monitoring/mongodb-mms-monitoring-agent_2.2.0.70-1_amd64.deb" + ) + sudo("dpkg -i mongodb-mms-monitoring-agent_2.2.0.70-1_amd64.deb") + run("rm mongodb-mms-monitoring-agent_2.2.0.70-1_amd64.deb") + put(os.path.join(env.SECRETS_PATH, "settings/mongo_mms_config.txt"), "mongo_mms_config.txt") + sudo('echo "\n" | sudo tee -a /etc/mongodb-mms/monitoring-agent.config') + sudo("cat mongo_mms_config.txt | sudo tee -a /etc/mongodb-mms/monitoring-agent.config") + sudo("start mongodb-mms-monitoring-agent") + def setup_redis(slave=False): - 
redis_version = '3.2.6' + redis_version = "3.2.6" with cd(env.VENDOR_PATH): - run('wget http://download.redis.io/releases/redis-%s.tar.gz' % redis_version) - run('tar -xzf redis-%s.tar.gz' % redis_version) - run('rm redis-%s.tar.gz' % redis_version) - with cd(os.path.join(env.VENDOR_PATH, 'redis-%s' % redis_version)): - sudo('make install') - put('config/redis-init', '/etc/init.d/redis', use_sudo=True) - sudo('chmod u+x /etc/init.d/redis') - put('config/redis.conf', '/etc/redis.conf', use_sudo=True) + run("wget http://download.redis.io/releases/redis-%s.tar.gz" % redis_version) + run("tar -xzf redis-%s.tar.gz" % redis_version) + run("rm redis-%s.tar.gz" % redis_version) + with cd(os.path.join(env.VENDOR_PATH, "redis-%s" % redis_version)): + sudo("make install") + put("config/redis-init", "/etc/init.d/redis", use_sudo=True) + sudo("chmod u+x /etc/init.d/redis") + put("config/redis.conf", "/etc/redis.conf", use_sudo=True) if slave: - put('config/redis_slave.conf', '/etc/redis_server.conf', use_sudo=True) + put("config/redis_slave.conf", "/etc/redis_server.conf", use_sudo=True) else: - put('config/redis_master.conf', '/etc/redis_server.conf', use_sudo=True) + put("config/redis_master.conf", "/etc/redis_server.conf", use_sudo=True) # sudo('chmod 666 /proc/sys/vm/overcommit_memory', pty=False) # run('echo "1" > /proc/sys/vm/overcommit_memory', pty=False) # sudo('chmod 644 /proc/sys/vm/overcommit_memory', pty=False) disable_thp() - sudo('systemctl enable rc-local.service') # Enable rc.local - sudo('echo "#!/bin/sh -e\n\nif test -f /sys/kernel/mm/transparent_hugepage/enabled; then\n\ + sudo("systemctl enable rc-local.service") # Enable rc.local + sudo( + 'echo "#!/bin/sh -e\n\nif test -f /sys/kernel/mm/transparent_hugepage/enabled; then\n\ echo never > /sys/kernel/mm/transparent_hugepage/enabled\n\ fi\n\ if test -f /sys/kernel/mm/transparent_hugepage/defrag; then\n\ echo never > /sys/kernel/mm/transparent_hugepage/defrag\n\ fi\n\n\ - exit 0" | sudo tee /etc/rc.local') + exit 0" | sudo tee /etc/rc.local' + ) sudo("echo 1 | sudo tee /proc/sys/vm/overcommit_memory") sudo('echo "vm.overcommit_memory = 1" | sudo tee -a /etc/sysctl.conf') sudo("sysctl vm.overcommit_memory=1") - put('config/redis_rclocal.txt', '/etc/rc.local', use_sudo=True) + put("config/redis_rclocal.txt", "/etc/rc.local", use_sudo=True) sudo("chown root.root /etc/rc.local") sudo("chmod a+x /etc/rc.local") sudo('echo "never" | sudo tee /sys/kernel/mm/transparent_hugepage/enabled') run('echo "\nnet.core.somaxconn=65535\n" | sudo tee -a /etc/sysctl.conf', pty=False) - sudo('mkdir -p /var/lib/redis') - sudo('update-rc.d redis defaults') - sudo('/etc/init.d/redis stop') - sudo('/etc/init.d/redis start') + sudo("mkdir -p /var/lib/redis") + sudo("update-rc.d redis defaults") + sudo("/etc/init.d/redis stop") + sudo("/etc/init.d/redis start") setup_syncookies() config_monit_redis() - + + def setup_munin(): - sudo('apt-get update') - sudo('apt-get install -y munin munin-node munin-plugins-extra spawn-fcgi') - put('config/munin.conf', '/etc/munin/munin.conf', use_sudo=True) # Only use on main munin - put('config/spawn_fcgi_munin_graph.conf', '/etc/init.d/spawn_fcgi_munin_graph', use_sudo=True) - put('config/spawn_fcgi_munin_html.conf', '/etc/init.d/spawn_fcgi_munin_html', use_sudo=True) - sudo('chmod u+x /etc/init.d/spawn_fcgi_munin_graph') - sudo('chmod u+x /etc/init.d/spawn_fcgi_munin_html') + sudo("apt-get update") + sudo("apt-get install -y munin munin-node munin-plugins-extra spawn-fcgi") + put("config/munin.conf", "/etc/munin/munin.conf", 
use_sudo=True) # Only use on main munin + put("config/spawn_fcgi_munin_graph.conf", "/etc/init.d/spawn_fcgi_munin_graph", use_sudo=True) + put("config/spawn_fcgi_munin_html.conf", "/etc/init.d/spawn_fcgi_munin_html", use_sudo=True) + sudo("chmod u+x /etc/init.d/spawn_fcgi_munin_graph") + sudo("chmod u+x /etc/init.d/spawn_fcgi_munin_html") with settings(warn_only=True): - sudo('chown nginx.www-data /var/log/munin/munin-cgi*') - sudo('chown nginx.www-data /usr/lib/cgi-bin/munin-cgi*') - sudo('chown nginx.www-data /usr/lib/munin/cgi/munin-cgi*') + sudo("chown nginx.www-data /var/log/munin/munin-cgi*") + sudo("chown nginx.www-data /usr/lib/cgi-bin/munin-cgi*") + sudo("chown nginx.www-data /usr/lib/munin/cgi/munin-cgi*") with settings(warn_only=True): - sudo('/etc/init.d/spawn_fcgi_munin_graph stop') - sudo('/etc/init.d/spawn_fcgi_munin_graph start') - sudo('update-rc.d spawn_fcgi_munin_graph defaults') - sudo('/etc/init.d/spawn_fcgi_munin_html stop') - sudo('/etc/init.d/spawn_fcgi_munin_html start') - sudo('update-rc.d spawn_fcgi_munin_html defaults') - sudo('/etc/init.d/munin-node stop') + sudo("/etc/init.d/spawn_fcgi_munin_graph stop") + sudo("/etc/init.d/spawn_fcgi_munin_graph start") + sudo("update-rc.d spawn_fcgi_munin_graph defaults") + sudo("/etc/init.d/spawn_fcgi_munin_html stop") + sudo("/etc/init.d/spawn_fcgi_munin_html start") + sudo("update-rc.d spawn_fcgi_munin_html defaults") + sudo("/etc/init.d/munin-node stop") time.sleep(2) - sudo('/etc/init.d/munin-node start') + sudo("/etc/init.d/munin-node start") with settings(warn_only=True): - sudo('chown nginx.www-data /var/log/munin/munin-cgi*') - sudo('chown nginx.www-data /usr/lib/cgi-bin/munin-cgi*') - sudo('chown nginx.www-data /usr/lib/munin/cgi/munin-cgi*') - sudo('chmod a+rw /var/log/munin/*') + sudo("chown nginx.www-data /var/log/munin/munin-cgi*") + sudo("chown nginx.www-data /usr/lib/cgi-bin/munin-cgi*") + sudo("chown nginx.www-data /usr/lib/munin/cgi/munin-cgi*") + sudo("chmod a+rw /var/log/munin/*") with settings(warn_only=True): - sudo('/etc/init.d/spawn_fcgi_munin_graph start') - sudo('/etc/init.d/spawn_fcgi_munin_html start') + sudo("/etc/init.d/spawn_fcgi_munin_graph start") + sudo("/etc/init.d/spawn_fcgi_munin_html start") + def copy_munin_data(from_server): - put(os.path.join(env.SECRETS_PATH, 'keys/newsblur.key'), '~/.ssh/newsblur.key') - put(os.path.join(env.SECRETS_PATH, 'keys/newsblur.key.pub'), '~/.ssh/newsblur.key.pub') - run('chmod 600 ~/.ssh/newsblur*') + put(os.path.join(env.SECRETS_PATH, "keys/newsblur.key"), "~/.ssh/newsblur.key") + put(os.path.join(env.SECRETS_PATH, "keys/newsblur.key.pub"), "~/.ssh/newsblur.key.pub") + run("chmod 600 ~/.ssh/newsblur*") # put("config/munin.nginx.conf", "/usr/local/nginx/conf/sites-enabled/munin.conf", use_sudo=True) - sudo('/etc/init.d/nginx reload') + sudo("/etc/init.d/nginx reload") - run("rsync -az -e \"ssh -i /home/sclay/.ssh/newsblur.key\" --stats --progress %s:/var/lib/munin/ /srv/munin" % from_server) - sudo('rm -fr /var/lib/bak-munin') + run( + 'rsync -az -e "ssh -i /home/sclay/.ssh/newsblur.key" --stats --progress %s:/var/lib/munin/ /srv/munin' + % from_server + ) + sudo("rm -fr /var/lib/bak-munin") sudo("mv /var/lib/munin /var/lib/bak-munin") sudo("mv /srv/munin /var/lib/") sudo("chown munin.munin -R /var/lib/munin") - run("sudo rsync -az -e \"ssh -i /home/sclay/.ssh/newsblur.key\" --stats --progress %s:/etc/munin/ /srv/munin-etc" % from_server) - sudo('rm -fr /etc/munin') + run( + 'sudo rsync -az -e "ssh -i /home/sclay/.ssh/newsblur.key" --stats --progress 
%s:/etc/munin/ /srv/munin-etc' + % from_server + ) + sudo("rm -fr /etc/munin") sudo("mv /srv/munin-etc /etc/munin") sudo("chown munin.munin -R /etc/munin") - run("sudo rsync -az -e \"ssh -i /home/sclay/.ssh/newsblur.key\" --stats --progress %s:/var/cache/munin/www/ /srv/munin-www" % from_server) - sudo('rm -fr /var/cache/munin/www') + run( + 'sudo rsync -az -e "ssh -i /home/sclay/.ssh/newsblur.key" --stats --progress %s:/var/cache/munin/www/ /srv/munin-www' + % from_server + ) + sudo("rm -fr /var/cache/munin/www") sudo("mv /srv/munin-www /var/cache/munin/www") sudo("chown munin.munin -R /var/cache/munin/www") sudo("/etc/init.d/munin restart") sudo("/etc/init.d/munin-node restart") - + def setup_db_munin(): - sudo('rm -f /etc/munin/plugins/mongo*') - sudo('rm -f /etc/munin/plugins/pg_*') - sudo('rm -f /etc/munin/plugins/redis_*') - sudo('cp -frs %s/config/munin/mongo* /etc/munin/plugins/' % env.NEWSBLUR_PATH) - sudo('cp -frs %s/config/munin/pg_* /etc/munin/plugins/' % env.NEWSBLUR_PATH) - sudo('cp -frs %s/config/munin/redis_* /etc/munin/plugins/' % env.NEWSBLUR_PATH) - sudo('/etc/init.d/munin-node stop') + sudo("rm -f /etc/munin/plugins/mongo*") + sudo("rm -f /etc/munin/plugins/pg_*") + sudo("rm -f /etc/munin/plugins/redis_*") + sudo("cp -frs %s/config/munin/mongo* /etc/munin/plugins/" % env.NEWSBLUR_PATH) + sudo("cp -frs %s/config/munin/pg_* /etc/munin/plugins/" % env.NEWSBLUR_PATH) + sudo("cp -frs %s/config/munin/redis_* /etc/munin/plugins/" % env.NEWSBLUR_PATH) + sudo("/etc/init.d/munin-node stop") time.sleep(2) - sudo('/etc/init.d/munin-node start') + sudo("/etc/init.d/munin-node start") def enable_celerybeat(): with virtualenv(): - run('mkdir -p data') - put('config/supervisor_celerybeat.conf', '/etc/supervisor/conf.d/celerybeat.conf', use_sudo=True) - put('config/supervisor_celeryd_work_queue.conf', '/etc/supervisor/conf.d/celeryd_work_queue.conf', use_sudo=True) - put('config/supervisor_celeryd_beat.conf', '/etc/supervisor/conf.d/celeryd_beat.conf', use_sudo=True) - put('config/supervisor_celeryd_beat_feeds.conf', '/etc/supervisor/conf.d/celeryd_beat_feeds.conf', use_sudo=True) - sudo('supervisorctl reread') - sudo('supervisorctl update') + run("mkdir -p data") + put("config/supervisor_celerybeat.conf", "/etc/supervisor/conf.d/celerybeat.conf", use_sudo=True) + put( + "config/supervisor_celeryd_work_queue.conf", + "/etc/supervisor/conf.d/celeryd_work_queue.conf", + use_sudo=True, + ) + put("config/supervisor_celeryd_beat.conf", "/etc/supervisor/conf.d/celeryd_beat.conf", use_sudo=True) + put( + "config/supervisor_celeryd_beat_feeds.conf", + "/etc/supervisor/conf.d/celeryd_beat_feeds.conf", + use_sudo=True, + ) + sudo("supervisorctl reread") + sudo("supervisorctl update") + def setup_db_mdadm(): - sudo('apt-get -y install xfsprogs mdadm') - sudo('yes | mdadm --create /dev/md0 --level=0 -c256 --raid-devices=4 /dev/xvdf /dev/xvdg /dev/xvdh /dev/xvdi') - sudo('mkfs.xfs /dev/md0') - sudo('mkdir -p /srv/db') - sudo('mount -t xfs -o rw,nobarrier,noatime,nodiratime /dev/md0 /srv/db') - sudo('mkdir -p /srv/db/mongodb') - sudo('chown mongodb.mongodb /srv/db/mongodb') + sudo("apt-get -y install xfsprogs mdadm") + sudo( + "yes | mdadm --create /dev/md0 --level=0 -c256 --raid-devices=4 /dev/xvdf /dev/xvdg /dev/xvdh /dev/xvdi" + ) + sudo("mkfs.xfs /dev/md0") + sudo("mkdir -p /srv/db") + sudo("mount -t xfs -o rw,nobarrier,noatime,nodiratime /dev/md0 /srv/db") + sudo("mkdir -p /srv/db/mongodb") + sudo("chown mongodb.mongodb /srv/db/mongodb") sudo("echo 'DEVICE /dev/xvdf /dev/xvdg /dev/xvdh 
/dev/xvdi' | sudo tee -a /etc/mdadm/mdadm.conf") sudo("mdadm --examine --scan | sudo tee -a /etc/mdadm/mdadm.conf") - sudo("echo '/dev/md0 /srv/db xfs rw,nobarrier,noatime,nodiratime,noauto 0 0' | sudo tee -a /etc/fstab") + sudo( + "echo '/dev/md0 /srv/db xfs rw,nobarrier,noatime,nodiratime,noauto 0 0' | sudo tee -a /etc/fstab" + ) sudo("sudo update-initramfs -u -v -k `uname -r`") + def setup_original_page_server(): setup_node_app() - sudo('mkdir -p /srv/originals') - sudo('chown %s.%s -R /srv/originals' % (env.user, env.user)) # We assume that the group is the same name as the user. It's common on linux + sudo("mkdir -p /srv/originals") + sudo( + "chown %s.%s -R /srv/originals" % (env.user, env.user) + ) # We assume that the group is the same name as the user. It's common on linux config_monit_original() - put('config/supervisor_node_original.conf', - '/etc/supervisor/conf.d/node_original.conf', use_sudo=True) - sudo('supervisorctl reread') - sudo('supervisorctl reload') + put("config/supervisor_node_original.conf", "/etc/supervisor/conf.d/node_original.conf", use_sudo=True) + sudo("supervisorctl reread") + sudo("supervisorctl reload") + def setup_elasticsearch(): ES_VERSION = "2.4.4" - sudo('add-apt-repository -y ppa:openjdk-r/ppa') - sudo('apt-get update') - sudo('apt-get install openjdk-7-jre -y') + sudo("add-apt-repository -y ppa:openjdk-r/ppa") + sudo("apt-get update") + sudo("apt-get install openjdk-7-jre -y") with cd(env.VENDOR_PATH): - run('mkdir -p elasticsearch-%s' % ES_VERSION) - with cd(os.path.join(env.VENDOR_PATH, 'elasticsearch-%s' % ES_VERSION)): + run("mkdir -p elasticsearch-%s" % ES_VERSION) + with cd(os.path.join(env.VENDOR_PATH, "elasticsearch-%s" % ES_VERSION)): # run('wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-%s.deb' % ES_VERSION) # For v5+ - run('wget http://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-%s.deb' % ES_VERSION) # For v1-v2 - sudo('dpkg -i elasticsearch-%s.deb' % ES_VERSION) - if not files.exists('/usr/share/elasticsearch/plugins/head'): - sudo('/usr/share/elasticsearch/bin/plugin install mobz/elasticsearch-head') + run( + "wget http://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-%s.deb" + % ES_VERSION + ) # For v1-v2 + sudo("dpkg -i elasticsearch-%s.deb" % ES_VERSION) + if not files.exists("/usr/share/elasticsearch/plugins/head"): + sudo("/usr/share/elasticsearch/bin/plugin install mobz/elasticsearch-head") + def setup_db_search(): - put('config/supervisor_celeryd_search_indexer.conf', '/etc/supervisor/conf.d/celeryd_search_indexer.conf', use_sudo=True) - put('config/supervisor_celeryd_search_indexer_tasker.conf', '/etc/supervisor/conf.d/celeryd_search_indexer_tasker.conf', use_sudo=True) - sudo('supervisorctl reread') - sudo('supervisorctl update') + put( + "config/supervisor_celeryd_search_indexer.conf", + "/etc/supervisor/conf.d/celeryd_search_indexer.conf", + use_sudo=True, + ) + put( + "config/supervisor_celeryd_search_indexer_tasker.conf", + "/etc/supervisor/conf.d/celeryd_search_indexer_tasker.conf", + use_sudo=True, + ) + sudo("supervisorctl reread") + sudo("supervisorctl update") + def setup_imageproxy(install_go=False): # sudo('apt-get update') @@ -1500,86 +1714,105 @@ def setup_imageproxy(install_go=False): if install_go: with cd(env.VENDOR_PATH): with settings(warn_only=True): - run('git clone https://github.com/willnorris/imageproxy.git') - run('wget https://dl.google.com/go/go1.13.3.linux-amd64.tar.gz') - run('tar -xzf go1.13.3.linux-amd64.tar.gz') - run('rm 
go1.13.3.linux-amd64.tar.gz') - sudo('rm /usr/bin/go') - sudo('ln -s /srv/code/go/bin/go /usr/bin/go') - with cd(os.path.join(env.VENDOR_PATH, 'imageproxy')): - run('go get willnorris.com/go/imageproxy/cmd/imageproxy') - put(os.path.join(env.SECRETS_PATH, 'settings/imageproxy.key'), - '/etc/imageproxy.key', use_sudo=True) - put(os.path.join(env.NEWSBLUR_PATH, 'config/supervisor_imageproxy.conf'), '/etc/supervisor/conf.d/supervisor_imageproxy.conf', use_sudo=True) - sudo('supervisorctl reread') - sudo('supervisorctl update') - sudo('ufw allow 443') - sudo('ufw allow 80') - put(os.path.join(env.NEWSBLUR_PATH, 'config/nginx.imageproxy.conf'), "/usr/local/nginx/conf/sites-enabled/imageproxy.conf", use_sudo=True) + run("git clone https://github.com/willnorris/imageproxy.git") + run("wget https://dl.google.com/go/go1.13.3.linux-amd64.tar.gz") + run("tar -xzf go1.13.3.linux-amd64.tar.gz") + run("rm go1.13.3.linux-amd64.tar.gz") + sudo("rm /usr/bin/go") + sudo("ln -s /srv/code/go/bin/go /usr/bin/go") + with cd(os.path.join(env.VENDOR_PATH, "imageproxy")): + run("go get willnorris.com/go/imageproxy/cmd/imageproxy") + put(os.path.join(env.SECRETS_PATH, "settings/imageproxy.key"), "/etc/imageproxy.key", use_sudo=True) + put( + os.path.join(env.NEWSBLUR_PATH, "config/supervisor_imageproxy.conf"), + "/etc/supervisor/conf.d/supervisor_imageproxy.conf", + use_sudo=True, + ) + sudo("supervisorctl reread") + sudo("supervisorctl update") + sudo("ufw allow 443") + sudo("ufw allow 80") + put( + os.path.join(env.NEWSBLUR_PATH, "config/nginx.imageproxy.conf"), + "/usr/local/nginx/conf/sites-enabled/imageproxy.conf", + use_sudo=True, + ) sudo("/etc/init.d/nginx restart") - - - + + @parallel def setup_usage_monitor(): - sudo('ln -fs %s/utils/monitor_disk_usage.py /etc/cron.daily/monitor_disk_usage' % env.NEWSBLUR_PATH) - sudo('/etc/cron.daily/monitor_disk_usage') - + sudo("ln -fs %s/utils/monitor_disk_usage.py /etc/cron.daily/monitor_disk_usage" % env.NEWSBLUR_PATH) + sudo("/etc/cron.daily/monitor_disk_usage") + + @parallel def setup_feeds_fetched_monitor(): - sudo('ln -fs %s/utils/monitor_task_fetches.py /etc/cron.hourly/monitor_task_fetches' % env.NEWSBLUR_PATH) - sudo('/etc/cron.hourly/monitor_task_fetches') - + sudo("ln -fs %s/utils/monitor_task_fetches.py /etc/cron.hourly/monitor_task_fetches" % env.NEWSBLUR_PATH) + sudo("/etc/cron.hourly/monitor_task_fetches") + + @parallel def setup_newsletter_monitor(): - sudo('ln -fs %s/utils/monitor_newsletter_delivery.py /etc/cron.hourly/monitor_newsletter_delivery' % env.NEWSBLUR_PATH) - sudo('/etc/cron.hourly/monitor_newsletter_delivery') - + sudo( + "ln -fs %s/utils/monitor_newsletter_delivery.py /etc/cron.hourly/monitor_newsletter_delivery" + % env.NEWSBLUR_PATH + ) + sudo("/etc/cron.hourly/monitor_newsletter_delivery") + + @parallel def setup_queue_monitor(): - sudo('ln -fs %s/utils/monitor_work_queue.py /etc/cron.hourly/monitor_work_queue' % env.NEWSBLUR_PATH) - sudo('/etc/cron.hourly/monitor_work_queue') - + sudo("ln -fs %s/utils/monitor_work_queue.py /etc/cron.hourly/monitor_work_queue" % env.NEWSBLUR_PATH) + sudo("/etc/cron.hourly/monitor_work_queue") + + @parallel def setup_redis_monitor(): - run('sleep 5') # Wait for redis to startup so the log file is there - sudo('ln -fs %s/utils/monitor_redis_bgsave.py /etc/cron.daily/monitor_redis_bgsave' % env.NEWSBLUR_PATH) + run("sleep 5") # Wait for redis to startup so the log file is there + sudo("ln -fs %s/utils/monitor_redis_bgsave.py /etc/cron.daily/monitor_redis_bgsave" % env.NEWSBLUR_PATH) with 
settings(warn_only=True): - sudo('/etc/cron.daily/monitor_redis_bgsave') - + sudo("/etc/cron.daily/monitor_redis_bgsave") + + # ================ # = Setup - Task = # ================ + def setup_task_firewall(): - sudo('ufw default deny') - sudo('ufw allow ssh') - sudo('ufw allow 80') - sudo('ufw --force enable') + sudo("ufw default deny") + sudo("ufw allow ssh") + sudo("ufw allow 80") + sudo("ufw --force enable") + + +def setup_motd(role="app"): + motd = "/etc/update-motd.d/22-newsblur-motd" + put("config/motd_%s.txt" % role, motd, use_sudo=True) + sudo("chown root.root %s" % motd) + sudo("chmod a+x %s" % motd) -def setup_motd(role='app'): - motd = '/etc/update-motd.d/22-newsblur-motd' - put('config/motd_%s.txt' % role, motd, use_sudo=True) - sudo('chown root.root %s' % motd) - sudo('chmod a+x %s' % motd) def enable_celery_supervisor(queue=None, update=True): if not queue: - put('config/supervisor_celeryd.conf', '/etc/supervisor/conf.d/celeryd.conf', use_sudo=True) + put("config/supervisor_celeryd.conf", "/etc/supervisor/conf.d/celeryd.conf", use_sudo=True) else: - put('config/supervisor_celeryd_%s.conf' % queue, '/etc/supervisor/conf.d/celeryd.conf', use_sudo=True) + put("config/supervisor_celeryd_%s.conf" % queue, "/etc/supervisor/conf.d/celeryd.conf", use_sudo=True) - sudo('supervisorctl reread') + sudo("supervisorctl reread") if update: - sudo('supervisorctl update') + sudo("supervisorctl update") + @parallel def copy_db_settings(): return copy_task_settings() - + + @parallel def copy_task_settings(): - server_hostname = run('hostname') + server_hostname = run("hostname") # if any([(n in server_hostname) for n in ['task', 'db', 'search', 'node', 'push']]): host = server_hostname # elif env.host: @@ -1588,31 +1821,38 @@ def copy_task_settings(): # host = env.host_string.split('.', 2)[0] with settings(warn_only=True): - run('rm -f %s/local_settings.py' % env.NEWSBLUR_PATH) - put(os.path.join(env.SECRETS_PATH, 'settings/task_settings.py'), - '%s/newsblur/local_settings.py' % env.NEWSBLUR_PATH) - run('echo "\nSERVER_NAME = \\\\"%s\\\\"" >> %s/newsblur/local_settings.py' % (host, env.NEWSBLUR_PATH)) + run("rm -f %s/local_settings.py" % env.NEWSBLUR_PATH) + put( + os.path.join(env.SECRETS_PATH, "settings/task_settings.py"), + "%s/newsblur/local_settings.py" % env.NEWSBLUR_PATH, + ) + run( + 'echo "\nSERVER_NAME = \\\\"%s\\\\"" >> %s/newsblur/local_settings.py' % (host, env.NEWSBLUR_PATH) + ) + @parallel def copy_spam(): - put(os.path.join(env.SECRETS_PATH, 'spam/spam.py'), '%s/apps/social/spam.py' % env.NEWSBLUR_PATH) - + put(os.path.join(env.SECRETS_PATH, "spam/spam.py"), "%s/apps/social/spam.py" % env.NEWSBLUR_PATH) + + # ========================= # = Setup - Digital Ocean = # ========================= DO_SIZES = { - '1': 's-1vcpu-1gb', - '2': 's-1vcpu-2gb', - '4': 's-2vcpu-4gb', - '8': 's-4vcpu-8gb', - '16': 's-6vcpu-16gb', - '32': 's-8vcpu-32gb', - '48': 's-12vcpu-48gb', - '64': 's-16vcpu-64gb', - '32c': 'c-16', + "1": "s-1vcpu-1gb", + "2": "s-1vcpu-2gb", + "4": "s-2vcpu-4gb", + "8": "s-4vcpu-8gb", + "16": "s-6vcpu-16gb", + "32": "s-8vcpu-32gb", + "48": "s-12vcpu-48gb", + "64": "s-16vcpu-64gb", + "32c": "c-16", } + def setup_do(name, size=1, image=None): instance_size = DO_SIZES[str(size)] doapi = digitalocean.Manager(token=django_settings.DO_TOKEN_FABRIC) @@ -1623,25 +1863,27 @@ def setup_do(name, size=1, image=None): image = "ubuntu-20-04-x64" else: images = dict((s.name, s.id) for s in doapi.get_all_images()) - if image == "task": + if image == "task": image = images["task-2018-02"] 
elif image == "app": image = images["app-2018-02"] else: images = dict((s.name, s.id) for s in doapi.get_all_images()) print(images) - + name = do_name(name) env.doname = name print("Creating droplet: %s" % name) - instance = digitalocean.Droplet(token=django_settings.DO_TOKEN_FABRIC, - name=name, - size_slug=instance_size, - image=image, - region='nyc1', - monitoring=True, - private_networking=True, - ssh_keys=ssh_key_ids) + instance = digitalocean.Droplet( + token=django_settings.DO_TOKEN_FABRIC, + name=name, + size_slug=instance_size, + image=image, + region="nyc1", + monitoring=True, + private_networking=True, + ssh_keys=ssh_key_ids, + ) instance.create() time.sleep(2) instance = digitalocean.Droplet.get_object(django_settings.DO_TOKEN_FABRIC, instance.id) @@ -1649,12 +1891,12 @@ def setup_do(name, size=1, image=None): i = 0 while True: - if instance.status == 'active': + if instance.status == "active": print("...booted: %s" % instance.ip_address) time.sleep(5) break - elif instance.status == 'new': - print(".", end=' ') + elif instance.status == "new": + print(".", end=" ") sys.stdout.flush() instance = digitalocean.Droplet.get_object(django_settings.DO_TOKEN_FABRIC, instance.id) i += 1 @@ -1669,6 +1911,7 @@ def setup_do(name, size=1, image=None): add_user_to_do() assign_digitalocean_roledefs() + def do_name(name): if re.search(r"[0-9]", name): print(" ---> Using %s as hostname" % name) @@ -1680,48 +1923,52 @@ def do_name(name): for i in range(1, 100): try_host = "%s%02d" % (name, i) if try_host not in existing_hosts: - print(" ---> %s hosts in %s (%s). %s is unused." % (len(existing_hosts), name, - ', '.join(existing_hosts), try_host)) + print( + " ---> %s hosts in %s (%s). %s is unused." + % (len(existing_hosts), name, ", ".join(existing_hosts), try_host) + ) return try_host - - + + def add_user_to_do(): env.user = "root" repo_user = "sclay" with settings(warn_only=True): - run('useradd -m %s' % (repo_user)) + run("useradd -m %s" % (repo_user)) setup_sudoers("%s" % (repo_user)) - run('mkdir -p ~%s/.ssh && chmod 700 ~%s/.ssh' % (repo_user, repo_user)) - run('rm -fr ~%s/.ssh/id_dsa*' % (repo_user)) + run("mkdir -p ~%s/.ssh && chmod 700 ~%s/.ssh" % (repo_user, repo_user)) + run("rm -fr ~%s/.ssh/id_dsa*" % (repo_user)) run('ssh-keygen -t dsa -f ~%s/.ssh/id_dsa -N ""' % (repo_user)) - run('touch ~%s/.ssh/authorized_keys' % (repo_user)) + run("touch ~%s/.ssh/authorized_keys" % (repo_user)) copy_ssh_keys() - run('chown %s.%s -R ~%s/.ssh' % (repo_user, repo_user, repo_user)) + run("chown %s.%s -R ~%s/.ssh" % (repo_user, repo_user, repo_user)) env.user = repo_user + # =============== # = Setup - EC2 = # =============== + def setup_ec2(): - AMI_NAME = 'ami-834cf1ea' # Ubuntu 64-bit 12.04 LTS + AMI_NAME = "ami-834cf1ea" # Ubuntu 64-bit 12.04 LTS # INSTANCE_TYPE = 'c1.medium' - INSTANCE_TYPE = 'c1.medium' + INSTANCE_TYPE = "c1.medium" conn = EC2Connection(django_settings.AWS_ACCESS_KEY_ID, django_settings.AWS_SECRET_ACCESS_KEY) - reservation = conn.run_instances(AMI_NAME, instance_type=INSTANCE_TYPE, - key_name=env.user, - security_groups=['db-mongo']) + reservation = conn.run_instances( + AMI_NAME, instance_type=INSTANCE_TYPE, key_name=env.user, security_groups=["db-mongo"] + ) instance = reservation.instances[0] print("Booting reservation: %s/%s (size: %s)" % (reservation, instance, INSTANCE_TYPE)) i = 0 while True: - if instance.state == 'pending': - print(".", end=' ') + if instance.state == "pending": + print(".", end=" ") sys.stdout.flush() instance.update() i += 1 time.sleep(i) - elif 
instance.state == 'running': + elif instance.state == "running": print("...booted: %s" % instance.public_dns_name) time.sleep(5) break @@ -1732,213 +1979,246 @@ def setup_ec2(): host = instance.public_dns_name env.host_string = host + # ========== # = Deploy = # ========== + @parallel def pull(master=False): with virtualenv(): - run('git pull') + run("git pull") if master: - run('git checkout master') - run('git pull') + run("git checkout master") + run("git pull") + def pre_deploy(): compress_assets(bundle=True) + @serial def post_deploy(): cleanup_assets() + def role_for_host(): for role, hosts in list(env.roledefs.items()): if env.host in hosts: return role + @parallel def deploy(fast=False, reload=False): role = role_for_host() - if role in ['work', 'search', 'debug']: + if role in ["work", "search", "debug"]: deploy_code(copy_assets=False, fast=fast, reload=True) else: deploy_code(copy_assets=False, fast=fast, reload=reload) + @parallel def deploy_web(fast=False): role = role_for_host() - if role in ['work', 'search']: + if role in ["work", "search"]: deploy_code(copy_assets=True, fast=fast, reload=True) else: deploy_code(copy_assets=True, fast=fast) + @parallel def deploy_rebuild(fast=False): deploy_code(copy_assets=True, fast=fast, rebuild=True) + @parallel def kill_gunicorn(): with virtualenv(): - sudo('pkill -9 -u %s -f gunicorn_django' % env.user) - + sudo("pkill -9 -u %s -f gunicorn_django" % env.user) + + @parallel def deploy_code(copy_assets=False, rebuild=False, fast=False, reload=False): with virtualenv(): - run('git pull') - run('mkdir -p static') + run("git pull") + run("mkdir -p static") if rebuild: - run('rm -fr static/*') + run("rm -fr static/*") if copy_assets: transfer_assets() - + with virtualenv(): with settings(warn_only=True): if reload: - sudo('supervisorctl reload') + sudo("supervisorctl reload") elif fast: kill_gunicorn() else: - sudo('kill -HUP `cat /srv/newsblur/logs/gunicorn.pid`') + sudo("kill -HUP `cat /srv/newsblur/logs/gunicorn.pid`") + @parallel def kill(): - sudo('supervisorctl reload') + sudo("supervisorctl reload") with settings(warn_only=True): - if env.user == 'ubuntu': - sudo('./utils/kill_gunicorn.sh') + if env.user == "ubuntu": + sudo("./utils/kill_gunicorn.sh") else: - run('./utils/kill_gunicorn.sh') + run("./utils/kill_gunicorn.sh") + @parallel def deploy_node(): pull() with virtualenv(): - run('sudo supervisorctl restart node_unread') - run('sudo supervisorctl restart node_unread_ssl') - run('sudo supervisorctl restart node_favicons') - run('sudo supervisorctl restart node_text') + run("sudo supervisorctl restart node_unread") + run("sudo supervisorctl restart node_unread_ssl") + run("sudo supervisorctl restart node_favicons") + run("sudo supervisorctl restart node_text") + def gunicorn_restart(): restart_gunicorn() + def restart_gunicorn(): with virtualenv(), settings(warn_only=True): - run('sudo supervisorctl restart gunicorn') + run("sudo supervisorctl restart gunicorn") + def gunicorn_stop(): with virtualenv(), settings(warn_only=True): - run('sudo supervisorctl stop gunicorn') + run("sudo supervisorctl stop gunicorn") + def staging(): - with cd('~/staging'): - run('git pull') - run('kill -HUP `cat logs/gunicorn.pid`') - run('curl -s http://dev.newsblur.com > /dev/null') - run('curl -s http://dev.newsblur.com/m/ > /dev/null') + with cd("~/staging"): + run("git pull") + run("kill -HUP `cat logs/gunicorn.pid`") + run("curl -s http://dev.newsblur.com > /dev/null") + run("curl -s http://dev.newsblur.com/m/ > /dev/null") + def staging_build(): 
- with cd('~/staging'): - run('git pull') - run('./manage.py migrate') - run('kill -HUP `cat logs/gunicorn.pid`') - run('curl -s http://dev.newsblur.com > /dev/null') - run('curl -s http://dev.newsblur.com/m/ > /dev/null') + with cd("~/staging"): + run("git pull") + run("./manage.py migrate") + run("kill -HUP `cat logs/gunicorn.pid`") + run("curl -s http://dev.newsblur.com > /dev/null") + run("curl -s http://dev.newsblur.com/m/ > /dev/null") + @parallel def celery(): celery_slow() + def celery_slow(): with virtualenv(): - run('git pull') + run("git pull") celery_stop() celery_start() + @parallel def celery_fast(): with virtualenv(): - run('git pull') + run("git pull") celery_reload() + @parallel def celery_stop(): with virtualenv(): - sudo('supervisorctl stop celery') + sudo("supervisorctl stop celery") with settings(warn_only=True): - if env.user == 'ubuntu': - sudo('./utils/kill_celery.sh') + if env.user == "ubuntu": + sudo("./utils/kill_celery.sh") else: - run('./utils/kill_celery.sh') + run("./utils/kill_celery.sh") + @parallel def celery_start(): with virtualenv(): - run('sudo supervisorctl start celery') - run('tail logs/newsblur.log') + run("sudo supervisorctl start celery") + run("tail logs/newsblur.log") + @parallel def celery_reload(): with virtualenv(): - run('sudo supervisorctl reload celery') - run('tail logs/newsblur.log') + run("sudo supervisorctl reload celery") + run("tail logs/newsblur.log") + def kill_celery(): with virtualenv(): with settings(warn_only=True): - if env.user == 'ubuntu': - sudo('./utils/kill_celery.sh') + if env.user == "ubuntu": + sudo("./utils/kill_celery.sh") else: - run('./utils/kill_celery.sh') + run("./utils/kill_celery.sh") + def compress_assets(bundle=False): - local('jammit -c newsblur/assets.yml --base-url https://www.newsblur.com --output static') - local('tar -czf static.tgz static/*') + local("jammit -c newsblur/assets.yml --base-url https://www.newsblur.com --output static") + local("tar -czf static.tgz static/*") tries_left = 5 while True: try: success = False with settings(warn_only=True): - local('PYTHONPATH=/srv/newsblur python utils/backups/s3.py set static.tgz') + local("PYTHONPATH=/srv/newsblur python utils/backups/s3.py set static.tgz") success = True if not success: raise Exception("Ack!") break except Exception as e: - print(" ***> %s. Trying %s more time%s..." % (e, tries_left, '' if tries_left == 1 else 's')) + print(" ***> %s. Trying %s more time%s..." % (e, tries_left, "" if tries_left == 1 else "s")) tries_left -= 1 - if tries_left <= 0: break + if tries_left <= 0: + break def transfer_assets(): # filename = "deploy_%s.tgz" % env.commit # Easy rollback? Eh, can just upload it again. 
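# --- Editor's sketch (illustrative, not part of the original patch): the loop in
# compress_assets above retries the S3 upload up to five times before giving up.
# The same pattern, factored into a hypothetical standalone helper:
import time

def retry(action, tries=5, delay=2):
    # Call `action` until it succeeds or `tries` attempts are exhausted,
    # echoing the countdown message compress_assets prints.
    for attempt in range(1, tries + 1):
        try:
            return action()
        except Exception as e:
            remaining = tries - attempt
            print(" ***> %s. Trying %s more time%s..." % (e, remaining, "" if remaining == 1 else "s"))
            if remaining == 0:
                raise
            time.sleep(delay)
# e.g. retry(lambda: local("PYTHONPATH=/srv/newsblur python utils/backups/s3.py set static.tgz"))
# The commented-out lines around this note sketch the per-commit rollback idea mentioned above.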
# run('PYTHONPATH=/srv/newsblur python s3.py get deploy_%s.tgz' % filename) - run('PYTHONPATH=/srv/newsblur python utils/backups/s3.py get static.tgz') + run("PYTHONPATH=/srv/newsblur python utils/backups/s3.py get static.tgz") # run('mv %s static/static.tgz' % filename) - run('mv static.tgz static/static.tgz') - run('tar -xzf static/static.tgz') - run('rm -f static/static.tgz') + run("mv static.tgz static/static.tgz") + run("tar -xzf static/static.tgz") + run("rm -f static/static.tgz") + def cleanup_assets(): - local('rm -f static.tgz') + local("rm -f static.tgz") + # =========== # = Backups = # =========== + def setup_redis_backups(name=None): # crontab for redis backups, name is either none, story, sessions, pubsub - crontab = ("0 4 * * * /srv/newsblur/venv/newsblur3/bin/python /srv/newsblur/utils/backups/backup_redis%s.py" % - (("_%s"%name) if name else "")) + crontab = ( + "0 4 * * * /srv/newsblur/venv/newsblur3/bin/python /srv/newsblur/utils/backups/backup_redis%s.py" + % (("_%s" % name) if name else "") + ) run('(crontab -l ; echo "%s") | sort - | uniq - | crontab -' % crontab) - run('crontab -l') + run("crontab -l") + def setup_mongo_backups(): # crontab for mongo backups crontab = "0 4 * * * /srv/newsblur/venv/newsblur3/bin/python /srv/newsblur/utils/backups/backup_mongo.py" run('(crontab -l ; echo "%s") | sort - | uniq - | crontab -' % crontab) - run('crontab -l') - + run("crontab -l") + + def setup_postgres_backups(): # crontab for postgres backups crontab = """ @@ -1947,64 +2227,84 @@ def setup_postgres_backups(): 0 * * * * sudo find /var/lib/postgresql/13/archive -type f -mmin +180 -delete""" run('(crontab -l ; echo "%s") | sort - | uniq - | crontab -' % crontab) - run('crontab -l') - + run("crontab -l") + + def backup_redis(name=None): - run('/srv/newsblur/venv/newsblur3/bin/python /srv/newsblur/utils/backups/backup_redis%s.py' % (("_%s"%name) if name else "")) - + run( + "/srv/newsblur/venv/newsblur3/bin/python /srv/newsblur/utils/backups/backup_redis%s.py" + % (("_%s" % name) if name else "") + ) + + def backup_mongo(): - run('/srv/newsblur/venv/newsblur3/bin/python /srv/newsblur/utils/backups/backup_mongo.py') + run("/srv/newsblur/venv/newsblur3/bin/python /srv/newsblur/utils/backups/backup_mongo.py") + def backup_postgresql(): - run('/srv/newsblur/venv/newsblur3/bin/python /srv/newsblur/utils/backups/backup_psql.py') + run("/srv/newsblur/venv/newsblur3/bin/python /srv/newsblur/utils/backups/backup_psql.py") + # =============== # = Calibration = # =============== + def sync_time(): with settings(warn_only=True): sudo("/etc/init.d/ntp stop") sudo("ntpdate pool.ntp.org") sudo("/etc/init.d/ntp start") + def setup_time_calibration(): - sudo('apt-get -y install ntp') - put('config/ntpdate.cron', '%s/' % env.NEWSBLUR_PATH) - sudo('chown root.root %s/ntpdate.cron' % env.NEWSBLUR_PATH) - sudo('chmod 755 %s/ntpdate.cron' % env.NEWSBLUR_PATH) - sudo('mv %s/ntpdate.cron /etc/cron.hourly/ntpdate' % env.NEWSBLUR_PATH) + sudo("apt-get -y install ntp") + put("config/ntpdate.cron", "%s/" % env.NEWSBLUR_PATH) + sudo("chown root.root %s/ntpdate.cron" % env.NEWSBLUR_PATH) + sudo("chmod 755 %s/ntpdate.cron" % env.NEWSBLUR_PATH) + sudo("mv %s/ntpdate.cron /etc/cron.hourly/ntpdate" % env.NEWSBLUR_PATH) with settings(warn_only=True): - sudo('/etc/cron.hourly/ntpdate') + sudo("/etc/cron.hourly/ntpdate") + # ============== # = Tasks - DB = # ============== + def restore_postgres(port=5432, download=False): with virtualenv(): - backup_date = '2020-12-03-02-51' + backup_date = "2020-12-03-02-51" yes 
= prompt("Dropping and creating NewsBlur PGSQL db. Sure?") - if yes != 'y': + if yes != "y": return if download: - run('mkdir -p postgres') - run('PYTHONPATH=%s python utils/backups/s3.py get postgres/backup_postgresql_%s.sql.gz' % (env.NEWSBLUR_PATH, backup_date)) + run("mkdir -p postgres") + run( + "PYTHONPATH=%s python utils/backups/s3.py get postgres/backup_postgresql_%s.sql.gz" + % (env.NEWSBLUR_PATH, backup_date) + ) # sudo('su postgres -c "createuser -p %s -U newsblur"' % (port,)) - with settings(warn_only=True): + with settings(warn_only=True): # May not exist - run('dropdb newsblur -p %s -U newsblur' % (port,), pty=False) - run('sudo -u postgres createuser newsblur -s') + run("dropdb newsblur -p %s -U newsblur" % (port,), pty=False) + run("sudo -u postgres createuser newsblur -s") # May already exist - run('createdb newsblur -p %s -O newsblur -U newsblur' % (port,), pty=False) - run('pg_restore -U newsblur -p %s --role=newsblur --dbname=newsblur /srv/newsblur/postgres/backup_postgresql_%s.sql.gz' % (port, backup_date), pty=False) + run("createdb newsblur -p %s -O newsblur -U newsblur" % (port,), pty=False) + run( + "pg_restore -U newsblur -p %s --role=newsblur --dbname=newsblur /srv/newsblur/postgres/backup_postgresql_%s.sql.gz" + % (port, backup_date), + pty=False, + ) + def restore_mongo(download=False): - backup_date = '2020-11-11-04-00' + backup_date = "2020-11-11-04-00" if download: - run('PYTHONPATH=/srv/newsblur python utils/backups/s3.py get backup_mongo_%s.tgz' % (backup_date)) - run('tar -xf backup_mongo_%s.tgz' % backup_date) - run('mongorestore backup_mongo_%s' % backup_date) + run("PYTHONPATH=/srv/newsblur python utils/backups/s3.py get backup_mongo_%s.tgz" % (backup_date)) + run("tar -xf backup_mongo_%s.tgz" % backup_date) + run("mongorestore backup_mongo_%s" % backup_date) + # ====== # = S3 = @@ -2012,48 +2312,54 @@ def restore_mongo(download=False): if django_settings: try: - ACCESS_KEY = django_settings.S3_ACCESS_KEY - SECRET = django_settings.S3_SECRET + ACCESS_KEY = django_settings.S3_ACCESS_KEY + SECRET = django_settings.S3_SECRET BUCKET_NAME = django_settings.S3_BACKUP_BUCKET # Note that you need to create this bucket first except: print(" ---> You need to fix django's settings. 
Enter python and type `import settings`.") + def save_file_in_s3(filename): - conn = S3Connection(ACCESS_KEY, SECRET) + conn = S3Connection(ACCESS_KEY, SECRET) bucket = conn.get_bucket(BUCKET_NAME) - k = Key(bucket) - k.key = filename + k = Key(bucket) + k.key = filename k.set_contents_from_filename(filename) + def get_file_from_s3(filename): - conn = S3Connection(ACCESS_KEY, SECRET) + conn = S3Connection(ACCESS_KEY, SECRET) bucket = conn.get_bucket(BUCKET_NAME) - k = Key(bucket) - k.key = filename + k = Key(bucket) + k.key = filename k.get_contents_to_filename(filename) + def list_backup_in_s3(): - conn = S3Connection(ACCESS_KEY, SECRET) + conn = S3Connection(ACCESS_KEY, SECRET) bucket = conn.get_bucket(BUCKET_NAME) for i, key in enumerate(bucket.get_all_keys()): print("[%s] %s" % (i, key.name)) + def delete_all_backups(): - #FIXME: validate filename exists - conn = S3Connection(ACCESS_KEY, SECRET) + # FIXME: validate filename exists + conn = S3Connection(ACCESS_KEY, SECRET) bucket = conn.get_bucket(BUCKET_NAME) for i, key in enumerate(bucket.get_all_keys()): print("deleting %s" % (key.name)) key.delete() + def add_revsys_keys(): put("~/Downloads/revsys-keys.pub", "revsys_keys") - run('cat revsys_keys >> ~/.ssh/authorized_keys') - run('rm revsys_keys') + run("cat revsys_keys >> ~/.ssh/authorized_keys") + run("rm revsys_keys") + def upgrade_to_virtualenv(role=None): if not role: @@ -2065,31 +2371,32 @@ def upgrade_to_virtualenv(role=None): elif role == "app": gunicorn_stop() elif role == "node": - run('sudo supervisorctl stop node_unread') - run('sudo supervisorctl stop node_favicons') + run("sudo supervisorctl stop node_unread") + run("sudo supervisorctl stop node_favicons") elif role == "work": - sudo('/etc/init.d/supervisor stop') + sudo("/etc/init.d/supervisor stop") kill_pgbouncer(bounce=False) setup_installs() pip() if role == "task": enable_celery_supervisor(update=False) - sudo('reboot') + sudo("reboot") elif role == "app": setup_gunicorn(supervisor=True, restart=False) - sudo('reboot') + sudo("reboot") elif role == "node": deploy_node() elif role == "search": setup_db_search() elif role == "work": enable_celerybeat() - sudo('reboot') + sudo("reboot") + def benchmark(): - run('curl -s https://packagecloud.io/install/repositories/akopytov/sysbench/script.deb.sh | sudo bash') - sudo('apt-get install -y sysbench') - run('sysbench cpu --cpu-max-prime=20000 run') - run('sysbench fileio --file-total-size=150G prepare') - run('sysbench fileio --file-total-size=150G --file-test-mode=rndrw --time=300 --max-requests=0 run') - run('sysbench fileio --file-total-size=150G cleanup') + run("curl -s https://packagecloud.io/install/repositories/akopytov/sysbench/script.deb.sh | sudo bash") + sudo("apt-get install -y sysbench") + run("sysbench cpu --cpu-max-prime=20000 run") + run("sysbench fileio --file-total-size=150G prepare") + run("sysbench fileio --file-total-size=150G --file-test-mode=rndrw --time=300 --max-requests=0 run") + run("sysbench fileio --file-total-size=150G cleanup") diff --git a/archive/jammit.py b/archive/jammit.py index f1020547e4..e3b83ecca1 100644 --- a/archive/jammit.py +++ b/archive/jammit.py @@ -8,10 +8,10 @@ MHTML_START = "" + class JammitAssets: + ASSET_FILENAME = "assets.yml" - ASSET_FILENAME = 'assets.yml' - def __init__(self, assets_dir): """ Initializes the Jammit object by reading the assets.yml file and @@ -20,31 +20,31 @@ def __init__(self, assets_dir): """ self.assets_dir = assets_dir self.assets = self.read_assets() - + def read_assets(self): """ Read the assets 
from the YAML and store it as a lookup dictionary.
         """
         filepath = os.path.join(self.assets_dir, self.ASSET_FILENAME)
-        with open(filepath, 'r') as yaml_file:
+        with open(filepath, "r") as yaml_file:
             return yaml.safe_load(yaml_file)
-    
+
     def render_tags(self, asset_type, asset_package):
         """
         Returns rendered <script> or <link> tags for an asset package.
         """
         [render_tags body lost to HTML stripping in this copy]
 
     def javascript_tag(self, path):
         return '<script type="text/javascript" src="%s"></script>' % path
-    
+
     def javascript_tag_compressed(self, asset_package, asset_type_ext):
-        filename = 'static/%s.%s' % (asset_package, asset_type_ext)
+        filename = "static/%s.%s" % (asset_package, asset_type_ext)
         asset_mtime = int(os.path.getmtime(filename))
-        path = '%s?%s' % (filename, asset_mtime)
+        path = "%s?%s" % (filename, asset_mtime)
         return self.javascript_tag(path)
-    
+
     def stylesheet_tag(self, path):
         return '<link rel="stylesheet" type="text/css" href="%s" />' % path
 
     def stylesheet_tag_compressed(self, asset_package, asset_type_ext):
-        datauri_filename = 'static/%s-datauri.%s' % (asset_package, asset_type_ext)
-        original_filename = 'static/%s.%s' % (asset_package, asset_type_ext)
+        datauri_filename = "static/%s-datauri.%s" % (asset_package, asset_type_ext)
+        original_filename = "static/%s.%s" % (asset_package, asset_type_ext)
         asset_mtime = int(os.path.getmtime(datauri_filename))
-        datauri_path = '%s?%s' % (datauri_filename, asset_mtime)
-        original_path = '%s?%s' % (original_filename, asset_mtime)
-        
-        return '\n'.join([
-            DATA_URI_START,
-            self.stylesheet_tag(datauri_path),
-            DATA_URI_END,
-            MHTML_START,
-            self.stylesheet_tag(original_path),
-            MHTML_END,
-        ])
+        datauri_path = "%s?%s" % (datauri_filename, asset_mtime)
+        original_path = "%s?%s" % (original_filename, asset_mtime)
+
+        return "\n".join(
+            [
+                DATA_URI_START,
+                self.stylesheet_tag(datauri_path),
+                DATA_URI_END,
+                MHTML_START,
+                self.stylesheet_tag(original_path),
+                MHTML_END,
+            ]
+        )
 
 
-class FileFinder: 
+class FileFinder:
     @classmethod
     def filefinder(cls, pattern):
         paths = []
-        if '**' in pattern:
-            folder, wild, pattern = pattern.partition('/**/')
+        if "**" in pattern:
+            folder, wild, pattern = pattern.partition("/**/")
             for f in cls.recursive_find_files(folder, pattern):
                 paths.append(f)
         else:
diff --git a/archive/munin/munin/__init__.py b/archive/munin/munin/__init__.py
index 67fa6a55c5..aa3462fc87 100755
--- a/archive/munin/munin/__init__.py
+++ b/archive/munin/munin/__init__.py
@@ -1,4 +1,3 @@
-
 __version__ = "1.4"
 
 import os
@@ -6,6 +5,7 @@
 import socket
 from decimal import Decimal
 
+
 class MuninPlugin(object):
     title = ""
     args = None
@@ -15,10 +15,10 @@ class MuninPlugin(object):
     fields = []
 
     def __init__(self):
-        if 'GRAPH_TITLE' in os.environ:
-            self.title = os.environ['GRAPH_TITLE']
-        if 'GRAPH_CATEGORY' in os.environ:
-            self.category = os.environ['GRAPH_CATEGORY']
+        if "GRAPH_TITLE" in os.environ:
+            self.title = os.environ["GRAPH_TITLE"]
+        if "GRAPH_CATEGORY" in os.environ:
+            self.category = os.environ["GRAPH_CATEGORY"]
         super(MuninPlugin, self).__init__()
 
     def autoconf(self):
@@ -26,18 +26,18 @@ def autoconf(self):
 
     def config(self):
         conf = []
-        for k in ('title', 'category', 'args', 'vlabel', 'info', 'scale', 'order'):
+        for k in ("title", "category", "args", "vlabel", "info", "scale", "order"):
             v = getattr(self, k, None)
             if v is not None:
                 if isinstance(v, bool):
                     v = v and "yes" or "no"
                 elif isinstance(v, (tuple, list)):
                     v = " ".join(v)
-                conf.append('graph_%s %s' % (k, v))
+                conf.append("graph_%s %s" % (k, v))
 
         for field_name, field_args in self.fields:
             for arg_name, arg_value in field_args.items():
-                conf.append('%s.%s %s' % (field_name, arg_name, arg_value))
+                conf.append("%s.%s %s" % (field_name, arg_name, arg_value))
 
         print("\n".join(conf))
 
@@ -45,7 +45,7 @@ def 
suggest(self): sys.exit(1) def run(self): - cmd = ((len(sys.argv) > 1) and sys.argv[1] or None) or "execute" + cmd = ((len(sys.argv) > 1) and sys.argv[1] or None) or "execute" if cmd == "execute": values = self.execute() if values: @@ -67,11 +67,12 @@ def run(self): self.suggest() sys.exit(0) + class MuninClient(object): def __init__(self, host, port=4949): self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.sock.connect((host, port)) - self.sock.recv(4096) # welcome, TODO: receive all + self.sock.recv(4096) # welcome, TODO: receive all def _command(self, cmd, term): self.sock.send("%s\n" % cmd) @@ -81,15 +82,15 @@ def _command(self, cmd, term): return buf.split(term)[0] def list(self): - return self._command('list', '\n').split(' ') + return self._command("list", "\n").split(" ") def fetch(self, service): data = self._command("fetch %s" % service, ".\n") - if data.startswith('#'): + if data.startswith("#"): raise Exception(data[2:]) values = {} - for line in data.split('\n'): + for line in data.split("\n"): if line: - k, v = line.split(' ', 1) - values[k.split('.')[0]] = Decimal(v) + k, v = line.split(" ", 1) + values[k.split(".")[0]] = Decimal(v) return values diff --git a/archive/munin/munin/cassandra.py b/archive/munin/munin/cassandra.py index f5a75405c4..53ea4271f1 100755 --- a/archive/munin/munin/cassandra.py +++ b/archive/munin/munin/cassandra.py @@ -8,6 +8,7 @@ space_re = re.compile(r"\s+") + class MuninCassandraPlugin(MuninPlugin): category = "Cassandra" @@ -15,7 +16,7 @@ def __init__(self, *args, **kwargs): super(MuninCassandraPlugin, self).__init__(*args, **kwargs) self.nodetool_path = os.environ["NODETOOL_PATH"] self.host = socket.gethostname() - self.keyspaces = [x for x in os.environ.get('CASSANDRA_KEYSPACE', '').split(',') if x] + self.keyspaces = [x for x in os.environ.get("CASSANDRA_KEYSPACE", "").split(",") if x] def execute_nodetool(self, cmd): p = Popen([self.nodetool_path, "-host", self.host, cmd], stdout=PIPE) @@ -23,22 +24,22 @@ def execute_nodetool(self, cmd): return output def parse_cfstats(self, text): - text = text.strip().split('\n') + text = text.strip().split("\n") cfstats = {} cf = None for line in text: line = line.strip() - if not line or line.startswith('-'): + if not line or line.startswith("-"): continue - name, value = line.strip().split(': ', 1) + name, value = line.strip().split(": ", 1) if name == "Keyspace": - ks = {'cf': {}} + ks = {"cf": {}} cf = None cfstats[value] = ks elif name == "Column Family": cf = {} - ks['cf'][value] = cf + ks["cf"][value] = cf elif cf is None: ks[name] = value else: @@ -50,30 +51,30 @@ def cfstats(self): def cinfo(self): text = self.execute_nodetool("info") - lines = text.strip().split('\n') + lines = text.strip().split("\n") token = lines[0] info = {} for l in lines[1:]: - name, value = l.split(':') + name, value = l.split(":") info[name.strip()] = value.strip() - l_num, l_units = info['Load'].split(' ', 1) + l_num, l_units = info["Load"].split(" ", 1) l_num = float(l_num) if l_units == "KB": scale = 1024 elif l_units == "MB": - scale = 1024*1024 + scale = 1024 * 1024 elif l_units == "GB": - scale = 1024*1024*1024 + scale = 1024 * 1024 * 1024 elif l_units == "TB": - scale = 1024*1024*1024*1024 - info['Load'] = int(l_num * scale) - info['token'] = token + scale = 1024 * 1024 * 1024 * 1024 + info["Load"] = int(l_num * scale) + info["token"] = token return info def tpstats(self): out = self.execute_nodetool("tpstats") tpstats = {} - for line in out.strip().split('\n')[1:]: + for line in 
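
The `MuninPlugin` base class being reformatted above implements munin's plugin protocol: `config` prints the graph and field definitions, `execute` returns a dict of values, and `run()` dispatches on `sys.argv`. A minimal sketch of a plugin written against that contract (the plugin name, field, and load-average metric are illustrative, not part of this patch):

    # Hypothetical plugin built on the MuninPlugin base class shown above.
    import os

    from vendor.munin import MuninPlugin

    class LoadPlugin(MuninPlugin):
        title = "Load average"
        vlabel = "load"
        fields = [("load", dict(label="load", type="GAUGE"))]

        def execute(self):
            # run() emits each dict entry as a munin "<field>.value <value>" line
            return {"load": os.getloadavg()[0]}

    if __name__ == "__main__":
        LoadPlugin().run()  # argv[1] selects config/autoconf/suggest/execute
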
out.strip().split("\n")[1:]: name, active, pending, completed = space_re.split(line) tpstats[name] = dict(active=int(active), pending=int(pending), completed=int(completed)) return tpstats diff --git a/archive/munin/munin/ddwrt.py b/archive/munin/munin/ddwrt.py index b053e14ea7..2bebf3612d 100755 --- a/archive/munin/munin/ddwrt.py +++ b/archive/munin/munin/ddwrt.py @@ -1,4 +1,3 @@ - # https://192.168.1.10/Info.live.htm import os @@ -6,18 +5,16 @@ import urllib.request from vendor.munin import MuninPlugin + class DDWrtPlugin(MuninPlugin): category = "Wireless" def __init__(self): super(DDWrtPlugin, self).__init__() - self.root_url = os.environ.get('DDWRT_URL') or "http://192.168.1.1" + self.root_url = os.environ.get("DDWRT_URL") or "http://192.168.1.1" self.url = self.root_url + "/Info.live.htm" def get_info(self): res = urllib.request.urlopen(self.url) text = res.read() - return dict( - x[1:-1].split('::') - for x in text.split('\n') - ) + return dict(x[1:-1].split("::") for x in text.split("\n")) diff --git a/archive/munin/munin/gearman.py b/archive/munin/munin/gearman.py index cf5a1a86e8..7165d3855d 100755 --- a/archive/munin/munin/gearman.py +++ b/archive/munin/munin/gearman.py @@ -5,16 +5,17 @@ import socket from vendor.munin import MuninPlugin -worker_re = re.compile(r'^(?P\d+) (?P[\d\.]+) (?P[^\s]+) :\s?(?P.*)$') +worker_re = re.compile(r"^(?P\d+) (?P[\d\.]+) (?P[^\s]+) :\s?(?P.*)$") + class MuninGearmanPlugin(MuninPlugin): category = "Gearman" def __init__(self): super(MuninGearmanPlugin, self).__init__() - addr = os.environ.get('GM_SERVER') or "127.0.0.1" - port = int(addr.split(':')[-1]) if ':' in addr else 4730 - host = addr.split(':')[0] + addr = os.environ.get("GM_SERVER") or "127.0.0.1" + port = int(addr.split(":")[-1]) if ":" in addr else 4730 + host = addr.split(":")[0] self.addr = (host, port) self._sock = None @@ -36,12 +37,12 @@ def get_workers(self): buf += sock.recv(8192) info = [] - for l in buf.split('\n'): - if l.strip() == '.': + for l in buf.split("\n"): + if l.strip() == ".": break m = worker_re.match(l) i = m.groupdict() - i['abilities'] = [x for x in i['abilities'].split(' ') if x] + i["abilities"] = [x for x in i["abilities"].split(" ") if x] info.append(i) return info @@ -53,14 +54,14 @@ def get_status(self): buf += sock.recv(8192) info = {} - for l in buf.split('\n'): + for l in buf.split("\n"): l = l.strip() - if l == '.': + if l == ".": break - counts = l.split('\t') + counts = l.split("\t") info[counts[0]] = dict( - total = int(counts[1]), - running = int(counts[2]), - workers = int(counts[3]), + total=int(counts[1]), + running=int(counts[2]), + workers=int(counts[3]), ) return info diff --git a/archive/munin/munin/memcached.py b/archive/munin/munin/memcached.py index 0663344576..72d6590f1c 100755 --- a/archive/munin/munin/memcached.py +++ b/archive/munin/munin/memcached.py @@ -4,6 +4,7 @@ import socket from vendor.munin import MuninPlugin + class MuninMemcachedPlugin(MuninPlugin): category = "Memcached" @@ -15,16 +16,16 @@ def autoconf(self): return True def get_stats(self): - host = os.environ.get('MEMCACHED_HOST') or '127.0.0.1' - port = int(os.environ.get('MEMCACHED_PORT') or '11211') + host = os.environ.get("MEMCACHED_HOST") or "127.0.0.1" + port = int(os.environ.get("MEMCACHED_PORT") or "11211") s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.connect((host, port)) s.send("stats\n") buf = "" - while 'END\r\n' not in buf: + while "END\r\n" not in buf: buf += s.recv(1024) - stats = (x.split(' ', 2) for x in buf.split('\r\n')) - stats = 
dict((x[1], x[2]) for x in stats if x[0] == 'STAT') + stats = (x.split(" ", 2) for x in buf.split("\r\n")) + stats = dict((x[1], x[2]) for x in stats if x[0] == "STAT") s.close() return stats diff --git a/archive/munin/munin/mongodb.py b/archive/munin/munin/mongodb.py index 4011040529..920805a15d 100755 --- a/archive/munin/munin/mongodb.py +++ b/archive/munin/munin/mongodb.py @@ -4,6 +4,7 @@ import sys from vendor.munin import MuninPlugin + class MuninMongoDBPlugin(MuninPlugin): dbname_in_args = False category = "MongoDB" @@ -13,13 +14,13 @@ def __init__(self): self.dbname = None if self.dbname_in_args: - self.dbname = sys.argv[0].rsplit('_', 1)[-1] + self.dbname = sys.argv[0].rsplit("_", 1)[-1] if not self.dbname: - self.dbname = os.environ.get('MONGODB_DATABASE') + self.dbname = os.environ.get("MONGODB_DATABASE") - host = os.environ.get('MONGODB_SERVER') or 'localhost' - if ':' in host: - host, port = host.split(':') + host = os.environ.get("MONGODB_SERVER") or "localhost" + if ":" in host: + host, port = host.split(":") port = int(port) else: port = 27017 @@ -27,14 +28,15 @@ def __init__(self): @property def connection(self): - if not hasattr(self, '_connection'): + if not hasattr(self, "_connection"): import pymongo + self._connection = pymongo.MongoClient(self.server[0], self.server[1]) return self._connection @property def db(self): - if not hasattr(self, '_db'): + if not hasattr(self, "_db"): self._db = getattr(self.connection, self.dbname) return self._db diff --git a/archive/munin/munin/mysql.py b/archive/munin/munin/mysql.py index 119de734dd..542678cca0 100755 --- a/archive/munin/munin/mysql.py +++ b/archive/munin/munin/mysql.py @@ -2,6 +2,7 @@ from configparser import SafeConfigParser from vendor.munin import MuninPlugin + class MuninMySQLPlugin(MuninPlugin): dbname_in_args = False category = "MySQL" @@ -9,12 +10,15 @@ class MuninMySQLPlugin(MuninPlugin): def __init__(self): super(MuninMySQLPlugin, self).__init__() - self.dbname = ((sys.argv[0].rsplit('_', 1)[-1] if self.dbname_in_args else None) - or os.environ.get('DATABASE') or self.default_table) + self.dbname = ( + (sys.argv[0].rsplit("_", 1)[-1] if self.dbname_in_args else None) + or os.environ.get("DATABASE") + or self.default_table + ) self.conninfo = dict( - user = "root", - host = "localhost", + user="root", + host="localhost", ) cnfpath = "" @@ -34,19 +38,25 @@ def __init__(self): for section in ["client", "munin"]: if not cnf.has_section(section): continue - for connkey, opt in [("user", "user"), ("passwd", "password"), ("host", "host"), ("port", "port")]: + for connkey, opt in [ + ("user", "user"), + ("passwd", "password"), + ("host", "host"), + ("port", "port"), + ]: if cnf.has_option(section, opt): self.conninfo[connkey] = cnf.get(section, opt) - for k in ('user', 'passwd', 'host', 'port'): + for k in ("user", "passwd", "host", "port"): # Use lowercase because that's what the existing mysql plugins do v = os.environ.get(k) if v: self.conninfo[k] = v def connection(self): - if not hasattr(self, '_connection'): + if not hasattr(self, "_connection"): import MySQLdb + self._connection = MySQLdb.connect(**self.conninfo) return self._connection diff --git a/archive/munin/munin/nginx.py b/archive/munin/munin/nginx.py index 383e3af127..3e386f8fcc 100755 --- a/archive/munin/munin/nginx.py +++ b/archive/munin/munin/nginx.py @@ -5,6 +5,7 @@ import urllib.request from vendor.munin import MuninPlugin + class MuninNginxPlugin(MuninPlugin): category = "Nginx" @@ -12,11 +13,12 @@ class MuninNginxPlugin(MuninPlugin): r"Active 
connections:\s+(?P<active>\d+)\s+"
         r"server accepts handled requests\s+"
         r"(?P<accepts>\d+)\s+(?P<handled>\d+)\s+(?P<requests>\d+)\s+"
-        r"Reading: (?P<reading>\d+) Writing: (?P<writing>\d+) Waiting: (?P<waiting>\d+)")
+        r"Reading: (?P<reading>\d+) Writing: (?P<writing>\d+) Waiting: (?P<waiting>\d+)"
+    )
 
     def __init__(self):
         super(MuninNginxPlugin, self).__init__()
-        self.url = os.environ.get('NX_STATUS_URL') or "http://localhost/nginx_status"
+        self.url = os.environ.get("NX_STATUS_URL") or "http://localhost/nginx_status"
 
     def autoconf(self):
         return bool(self.get_status())
diff --git a/archive/munin/munin/pgbouncer.py b/archive/munin/munin/pgbouncer.py
index d8f3dd96ff..4e9f146090 100755
--- a/archive/munin/munin/pgbouncer.py
+++ b/archive/munin/munin/pgbouncer.py
@@ -1,6 +1,7 @@
 import sys
 from vendor.munin.postgres import MuninPostgresPlugin
 
+
 class MuninPgBouncerPlugin(MuninPostgresPlugin):
     dbname_in_args = False
     default_table = "pgbouncer"
@@ -8,11 +9,12 @@ class MuninPgBouncerPlugin(MuninPostgresPlugin):
 
     def __init__(self, *args, **kwargs):
         super(MuninPgBouncerPlugin, self).__init__(*args, **kwargs)
-        self.dbwatched = sys.argv[0].rsplit('_', 1)[-1]
+        self.dbwatched = sys.argv[0].rsplit("_", 1)[-1]
 
     def connection(self):
-        if not hasattr(self, '_connection'):
+        if not hasattr(self, "_connection"):
             import psycopg2
+
             self._connection = psycopg2.connect(self.dsn)
             self._connection.set_isolation_level(0)
         return self._connection
@@ -25,9 +27,8 @@ def execute(self):
         totals = dict.fromkeys((field[0] for field in self.fields), 0)
         for row in cursor:
             row_dict = dict(zip(columns, row))
-            if row_dict['database'] in (self.dbwatched, self.dbwatched + '\x00'):
+            if row_dict["database"] in (self.dbwatched, self.dbwatched + "\x00"):
                 for field in self.fields:
                     totals[field[0]] += row_dict[field[0]]
 
         return dict((field[0], totals[field[0]]) for field in self.fields)
-
diff --git a/archive/munin/munin/postgres.py b/archive/munin/munin/postgres.py
index 541c25bae7..42f7022ae5 100755
--- a/archive/munin/munin/postgres.py
+++ b/archive/munin/munin/postgres.py
@@ -1,7 +1,7 @@
-
 import os, sys
 from vendor.munin import MuninPlugin
 
+
 class MuninPostgresPlugin(MuninPlugin):
     dbname_in_args = False
     category = "PostgreSQL"
@@ -10,18 +10,22 @@ class MuninPostgresPlugin(MuninPlugin):
 
     def __init__(self):
         super(MuninPostgresPlugin, self).__init__()
-        self.dbname = ((sys.argv[0].rsplit('_', 1)[-1] if self.dbname_in_args else None)
-            or os.environ.get('PGDATABASE') or self.default_table)
+        self.dbname = (
+            (sys.argv[0].rsplit("_", 1)[-1] if self.dbname_in_args else None)
+            or os.environ.get("PGDATABASE")
+            or self.default_table
+        )
         dsn = ["dbname='%s'" % self.dbname]
-        for k in ('user', 'password', 'host', 'port'):
-            v = os.environ.get('DB%s' % k.upper())
+        for k in ("user", "password", "host", "port"):
+            v = os.environ.get("DB%s" % k.upper())
             if v:
                 dsn.append("db%s='%s'" % (k, v))
-        self.dsn = ' '.join(dsn)
+        self.dsn = " ".join(dsn)
 
     def connection(self):
-        if not hasattr(self, '_connection'):
+        if not hasattr(self, "_connection"):
             import psycopg2
+
             self._connection = psycopg2.connect(self.dsn)
         return self._connection
 
@@ -32,13 +36,14 @@ def autoconf(self):
         return bool(self.connection())
 
     def tables(self):
-        if not hasattr(self, '_tables'):
+        if not hasattr(self, "_tables"):
             c = self.cursor()
             c.execute(
                 "SELECT c.relname FROM pg_catalog.pg_class c"
                 " LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace"
                 " WHERE c.relkind IN ('r','')"
                 " AND n.nspname NOT IN ('pg_catalog', 'pg_toast')"
-                " AND pg_catalog.pg_table_is_visible(c.oid)")
+                " AND pg_catalog.pg_table_is_visible(c.oid)"
+            )
             self._tables = [r[0] for 
r in c.fetchall()] return self._tables diff --git a/archive/munin/munin/redis.py b/archive/munin/munin/redis.py index a569adc06c..00818075b2 100755 --- a/archive/munin/munin/redis.py +++ b/archive/munin/munin/redis.py @@ -4,6 +4,7 @@ import socket from vendor.munin import MuninPlugin + class MuninRedisPlugin(MuninPlugin): category = "Redis" @@ -15,9 +16,9 @@ def autoconf(self): return True def get_info(self): - host = os.environ.get('REDIS_HOST') or '127.0.0.1' - port = int(os.environ.get('REDIS_PORT') or '6379') - if host.startswith('/'): + host = os.environ.get("REDIS_HOST") or "127.0.0.1" + port = int(os.environ.get("REDIS_PORT") or "6379") + if host.startswith("/"): s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) s.connect(host) else: @@ -25,9 +26,9 @@ def get_info(self): s.connect((host, port)) s.send("*1\r\n$4\r\ninfo\r\n") buf = "" - while '\r\n' not in buf: + while "\r\n" not in buf: buf += s.recv(1024) - l, buf = buf.split('\r\n', 1) + l, buf = buf.split("\r\n", 1) if l[0] != "$": s.close() raise Exception("Protocol error") @@ -35,7 +36,7 @@ def get_info(self): if remaining > 0: buf += s.recv(remaining) s.close() - return dict(x.split(':', 1) for x in buf.split('\r\n') if ':' in x) + return dict(x.split(":", 1) for x in buf.split("\r\n") if ":" in x) def execute(self): stats = self.get_info() diff --git a/archive/munin/munin/riak.py b/archive/munin/munin/riak.py index 7d30e48ca4..9d15b27b0f 100755 --- a/archive/munin/munin/riak.py +++ b/archive/munin/munin/riak.py @@ -9,15 +9,16 @@ import urllib.request from vendor.munin import MuninPlugin + class MuninRiakPlugin(MuninPlugin): category = "Riak" def __init__(self): super(MuninRiakPlugin, self).__init__() - host = os.environ.get('RIAK_HOST') or 'localhost' - if ':' in host: - host, port = host.split(':') + host = os.environ.get("RIAK_HOST") or "localhost" + if ":" in host: + host, port = host.split(":") port = int(port) else: port = 8098 diff --git a/config/gunicorn_conf.py b/config/gunicorn_conf.py index f7b0234752..866639a646 100644 --- a/config/gunicorn_conf.py +++ b/config/gunicorn_conf.py @@ -3,7 +3,7 @@ import psutil -GIGS_OF_MEMORY = psutil.virtual_memory().total/1024/1024/1024. +GIGS_OF_MEMORY = psutil.virtual_memory().total / 1024 / 1024 / 1024.0 NUM_CPUS = psutil.cpu_count() bind = "0.0.0.0:8000" @@ -27,12 +27,12 @@ if workers > 16: workers = 16 -if os.environ.get('DOCKERBUILD', False): +if os.environ.get("DOCKERBUILD", False): workers = 2 -prom_folder = '/srv/newsblur/.prom_cache' +prom_folder = "/srv/newsblur/.prom_cache" os.makedirs(prom_folder, exist_ok=True) -os.environ['PROMETHEUS_MULTIPROC_DIR'] = prom_folder +os.environ["PROMETHEUS_MULTIPROC_DIR"] = prom_folder for filename in os.listdir(prom_folder): file_path = os.path.join(prom_folder, filename) try: @@ -41,7 +41,7 @@ elif os.path.isdir(file_path): shutil.rmtree(file_path) except Exception as e: - print('Failed to delete %s. Reason: %s' % (file_path, e)) + print("Failed to delete %s. 
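
Context for the gunicorn hunk above: gunicorn_conf.py wipes `.prom_cache` and points `PROMETHEUS_MULTIPROC_DIR` at it so stale per-worker metric files never survive a restart, then imports `prometheus_client.multiprocess` just below. The documented companion to that setup, sketched here rather than taken from this patch, is a gunicorn `child_exit` hook that retires a dead worker's metric files:

    # Sketch of the standard prometheus_client multiprocess pairing for
    # gunicorn; this hook is not part of the patch itself.
    from prometheus_client import multiprocess

    def child_exit(server, worker):
        # Marks the exited worker's mmap'd metric files as dead so the
        # multiprocess collector stops reporting them.
        multiprocess.mark_process_dead(worker.pid)
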
Reason: %s" % (file_path, e)) from prometheus_client import multiprocess diff --git a/config/pystartup.py b/config/pystartup.py index 949523ccc6..ab64160a21 100644 --- a/config/pystartup.py +++ b/config/pystartup.py @@ -16,23 +16,37 @@ historyPath = os.path.expanduser("~/.pyhistory") historyTmp = os.path.expanduser("~/.pyhisttmp.py") -endMarkerStr= "# # # histDUMP # # #" +endMarkerStr = "# # # histDUMP # # #" -saveMacro= "import readline; readline.write_history_file('"+historyTmp+"'); \ +saveMacro = ( + "import readline; readline.write_history_file('" + + historyTmp + + "'); \ print '####>>>>>>>>>>'; print ''.join(filter(lambda lineP: \ - not lineP.strip().endswith('"+endMarkerStr+"'), \ - open('"+historyTmp+"').readlines())[:])+'####<<<<<<<<<<'"+endMarkerStr + not lineP.strip().endswith('" + + endMarkerStr + + "'), \ + open('" + + historyTmp + + "').readlines())[:])+'####<<<<<<<<<<'" + + endMarkerStr +) + +readline.parse_and_bind("tab: complete") +readline.parse_and_bind('\C-w: "' + saveMacro + '"') -readline.parse_and_bind('tab: complete') -readline.parse_and_bind('\C-w: "'+saveMacro+'"') def save_history(historyPath=historyPath, endMarkerStr=endMarkerStr): import readline + readline.write_history_file(historyPath) # Now filter out those line containing the saveMacro - lines= filter(lambda lineP, endMarkerStr=endMarkerStr: - not lineP.strip().endswith(endMarkerStr), open(historyPath).readlines()) - open(historyPath, 'w+').write(''.join(lines)) + lines = filter( + lambda lineP, endMarkerStr=endMarkerStr: not lineP.strip().endswith(endMarkerStr), + open(historyPath).readlines(), + ) + open(historyPath, "w+").write("".join(lines)) + if os.path.exists(historyPath): readline.read_history_file(historyPath) @@ -40,4 +54,4 @@ def save_history(historyPath=historyPath, endMarkerStr=endMarkerStr): atexit.register(save_history) del os, atexit, readline, rlcompleter, save_history, historyPath -del historyTmp, endMarkerStr, saveMacro \ No newline at end of file +del historyTmp, endMarkerStr, saveMacro diff --git a/flask_metrics/flask_metrics_haproxy.py b/flask_metrics/flask_metrics_haproxy.py index 7576584eef..86cb4ccd2d 100644 --- a/flask_metrics/flask_metrics_haproxy.py +++ b/flask_metrics/flask_metrics_haproxy.py @@ -19,22 +19,23 @@ STATUS_MAPPING = { - "UNK": 0, # unknown - "INI": 1, # initializing + "UNK": 0, # unknown + "INI": 1, # initializing "SOCKERR": 2, # socket error - "L4OK": 3, # check passed on layer 4, no upper layers testing enabled - "L4TOUT": 4, # layer 1-4 timeout - "L4CON": 5, # layer 1-4 connection problem, for example "Connection refused" (tcp rst) or "No route to host" (icmp) - "L6OK": 6, # check passed on layer 6 - "L6TOUT": 7, # layer 6 (SSL) timeout - "L6RSP": 8, # layer 6 invalid response - protocol error - "L7OK": 9, # check passed on layer 7 - "L7OKC": 10, # check conditionally passed on layer 7, for example 404 with disable-on-404 + "L4OK": 3, # check passed on layer 4, no upper layers testing enabled + "L4TOUT": 4, # layer 1-4 timeout + "L4CON": 5, # layer 1-4 connection problem, for example "Connection refused" (tcp rst) or "No route to host" (icmp) + "L6OK": 6, # check passed on layer 6 + "L6TOUT": 7, # layer 6 (SSL) timeout + "L6RSP": 8, # layer 6 invalid response - protocol error + "L7OK": 9, # check passed on layer 7 + "L7OKC": 10, # check conditionally passed on layer 7, for example 404 with disable-on-404 "L7TOUT": 11, # layer 7 (HTTP/SMTP) timeout - "L7RSP": 12, # layer 7 invalid response - protocol error - "L7STS": 13, # layer 7 response error, for example HTTP 
5xx + "L7RSP": 12, # layer 7 invalid response - protocol error + "L7STS": 13, # layer 7 response error, for example HTTP 5xx } + def format_state_data(label, data): formatted_data = {} for k, v in data.items(): @@ -42,37 +43,37 @@ def format_state_data(label, data): formatted_data[k] = f'{label}{{servername="{k}"}} {STATUS_MAPPING[v.strip()]}' return formatted_data + def fetch_states(): - res = requests.get('https://newsblur.com:1936/;csv', auth=HTTPBasicAuth('gimmiestats', 'StatsGiver')) + res = requests.get("https://newsblur.com:1936/;csv", auth=HTTPBasicAuth("gimmiestats", "StatsGiver")) - lines = res.content.decode('utf-8').split('\n') + lines = res.content.decode("utf-8").split("\n") header_line = lines[0].split(",") - check_status_index = header_line.index('check_status') - servername_index = header_line.index('svname') + check_status_index = header_line.index("check_status") + servername_index = header_line.index("svname") data = {} backends = [line.split(",") for line in lines[1:]] for backend_data in backends: - if len(backend_data) <= check_status_index: continue - if len(backend_data) <= servername_index: continue - if backend_data[servername_index] in ['FRONTEND', 'BACKEND']: continue + if len(backend_data) <= check_status_index: + continue + if len(backend_data) <= servername_index: + continue + if backend_data[servername_index] in ["FRONTEND", "BACKEND"]: + continue backend_status = backend_data[check_status_index].replace("*", "") data[backend_data[servername_index]] = backend_status - + return data @app.route("/state/") def haproxy_state(): backends = fetch_states() - + formatted_data = format_state_data("haproxy_state", backends) - context = { - 'chart_name': 'haproxy_state', - 'chart_type': 'gauge', - 'data': formatted_data - } - html_body = render_template('prometheus_data.html', **context) + context = {"chart_name": "haproxy_state", "chart_type": "gauge", "data": formatted_data} + html_body = render_template("prometheus_data.html", **context) return Response(html_body, content_type="text/plain") diff --git a/flask_metrics/flask_metrics_mongo.py b/flask_metrics/flask_metrics_mongo.py index 4eee7a501f..ebbad8ade4 100644 --- a/flask_metrics/flask_metrics_mongo.py +++ b/flask_metrics/flask_metrics_mongo.py @@ -17,10 +17,13 @@ if settings.DOCKERBUILD: connection = pymongo.MongoClient(f"mongodb://{settings.MONGO_DB['host']}") else: - connection = pymongo.MongoClient(f"mongodb://{settings.MONGO_DB['username']}:{settings.MONGO_DB['password']}@{settings.SERVER_NAME}.node.consul/?authSource=admin") + connection = pymongo.MongoClient( + f"mongodb://{settings.MONGO_DB['username']}:{settings.MONGO_DB['password']}@{settings.SERVER_NAME}.node.consul/?authSource=admin" + ) MONGO_HOST = settings.SERVER_NAME + @app.route("/objects/") def objects(): try: @@ -31,44 +34,44 @@ def objects(): return Response(f"Operation failure: {e}", 500) except pymongo.errors.NotMasterError as e: return Response(f"NotMaster error: {e}", 500) - data = dict(objects=stats['objects']) + data = dict(objects=stats["objects"]) formatted_data = {} for k, v in data.items(): formatted_data[k] = f'mongo_objects{{db="{MONGO_HOST}"}} {v}' context = { "data": formatted_data, - "chart_name": 'objects', - "chart_type": 'gauge', + "chart_name": "objects", + "chart_type": "gauge", } - html_body = render_template('prometheus_data.html', **context) + html_body = render_template("prometheus_data.html", **context) return Response(html_body, content_type="text/plain") @app.route("/mongo-replset-lag/") def repl_set_lag(): def 
_get_oplog_length(): - oplog = connection.rs.command('printReplicationInfo') - last_op = oplog.find({}, {'ts': 1}).sort([('$natural', -1)]).limit(1)[0]['ts'].time - first_op = oplog.find({}, {'ts': 1}).sort([('$natural', 1)]).limit(1)[0]['ts'].time + oplog = connection.rs.command("printReplicationInfo") + last_op = oplog.find({}, {"ts": 1}).sort([("$natural", -1)]).limit(1)[0]["ts"].time + first_op = oplog.find({}, {"ts": 1}).sort([("$natural", 1)]).limit(1)[0]["ts"].time oplog_length = last_op - first_op return oplog_length def _get_max_replication_lag(): PRIMARY_STATE = 1 SECONDARY_STATE = 2 - status = connection.admin.command('replSetGetStatus') - members = status['members'] + status = connection.admin.command("replSetGetStatus") + members = status["members"] primary_optime = None oldest_secondary_optime = None for member in members: - member_state = member['state'] - optime = member['optime'] + member_state = member["state"] + optime = member["optime"] if member_state == PRIMARY_STATE: - primary_optime = optime['ts'].time + primary_optime = optime["ts"].time elif member_state == SECONDARY_STATE: - if not oldest_secondary_optime or optime['ts'].time < oldest_secondary_optime: - oldest_secondary_optime = optime['ts'].time + if not oldest_secondary_optime or optime["ts"].time < oldest_secondary_optime: + oldest_secondary_optime = optime["ts"].time if not primary_optime or not oldest_secondary_optime: raise Exception("Replica set is not healthy") @@ -86,7 +89,7 @@ def _get_max_replication_lag(): return Response(f"Operation failure: {e}", 500) except pymongo.errors.NotMasterError as e: return Response(f"NotMaster error: {e}", 500) - + formatted_data = {} for k, v in oplog_length.items(): formatted_data[k] = f'mongo_oplog{{type="length", db="{MONGO_HOST}"}} {v}' @@ -95,10 +98,10 @@ def _get_max_replication_lag(): context = { "data": formatted_data, - "chart_name": 'oplog_metrics', - "chart_type": 'gauge', + "chart_name": "oplog_metrics", + "chart_type": "gauge", } - html_body = render_template('prometheus_data.html', **context) + html_body = render_template("prometheus_data.html", **context) return Response(html_body, content_type="text/plain") @@ -112,52 +115,49 @@ def size(): return Response(f"Operation failure: {e}", 500) except pymongo.errors.NotMasterError as e: return Response(f"NotMaster error: {e}", 500) - data = dict(size=stats['fsUsedSize']) + data = dict(size=stats["fsUsedSize"]) formatted_data = {} for k, v in data.items(): formatted_data[k] = f'mongo_db_size{{db="{MONGO_HOST}"}} {v}' context = { "data": formatted_data, - "chart_name": 'db_size_bytes', - "chart_type": 'gauge', + "chart_name": "db_size_bytes", + "chart_type": "gauge", } - html_body = render_template('prometheus_data.html', **context) + html_body = render_template("prometheus_data.html", **context) return Response(html_body, content_type="text/plain") @app.route("/ops/") def ops(): try: - status = connection.admin.command('serverStatus') + status = connection.admin.command("serverStatus") except pymongo.errors.ServerSelectionTimeoutError as e: return Response(f"Server selection timeout: {e}", 500) except pymongo.errors.OperationFailure as e: return Response(f"Operation failure: {e}", 500) except pymongo.errors.NotMasterError as e: return Response(f"NotMaster error: {e}", 500) - data = dict( - (q, status["opcounters"][q]) - for q in status['opcounters'].keys() - ) - + data = dict((q, status["opcounters"][q]) for q in status["opcounters"].keys()) + formatted_data = {} for k, v in data.items(): formatted_data[k] = 
f'mongo_ops{{type="{k}", db="{MONGO_HOST}"}} {v}' - + context = { "data": formatted_data, - "chart_name": 'ops', - "chart_type": 'gauge', + "chart_name": "ops", + "chart_type": "gauge", } - html_body = render_template('prometheus_data.html', **context) + html_body = render_template("prometheus_data.html", **context) return Response(html_body, content_type="text/plain") @app.route("/page-faults/") def page_faults(): try: - status = connection.admin.command('serverStatus') + status = connection.admin.command("serverStatus") except pymongo.errors.ServerSelectionTimeoutError as e: return Response(f"Server selection timeout: {e}", 500) except pymongo.errors.OperationFailure as e: @@ -165,7 +165,7 @@ def page_faults(): except pymongo.errors.NotMasterError as e: return Response(f"NotMaster error: {e}", 500) try: - value = status['extra_info']['page_faults'] + value = status["extra_info"]["page_faults"] except KeyError: value = "U" data = dict(page_faults=value) @@ -175,37 +175,34 @@ def page_faults(): context = { "data": formatted_data, - "chart_name": 'page_faults', - "chart_type": 'counter', + "chart_name": "page_faults", + "chart_type": "counter", } - html_body = render_template('prometheus_data.html', **context) + html_body = render_template("prometheus_data.html", **context) return Response(html_body, content_type="text/plain") @app.route("/page-queues/") def page_queues(): try: - status = connection.admin.command('serverStatus') + status = connection.admin.command("serverStatus") except pymongo.errors.ServerSelectionTimeoutError as e: return Response(f"Server selection timeout: {e}", 500) except pymongo.errors.OperationFailure as e: return Response(f"Operation failure: {e}", 500) except pymongo.errors.NotMasterError as e: return Response(f"NotMaster error: {e}", 500) - data = dict( - (q, status["globalLock"]["currentQueue"][q]) - for q in ("readers", "writers") - ) + data = dict((q, status["globalLock"]["currentQueue"][q]) for q in ("readers", "writers")) formatted_data = {} for k, v in data.items(): formatted_data[k] = f'mongo_page_queues{{type="{k}", db="{MONGO_HOST}"}} {v}' context = { "data": formatted_data, - "chart_name": 'queues', - "chart_type": 'gauge', + "chart_name": "queues", + "chart_type": "gauge", } - html_body = render_template('prometheus_data.html', **context) + html_body = render_template("prometheus_data.html", **context) return Response(html_body, content_type="text/plain") diff --git a/flask_metrics/flask_metrics_redis.py b/flask_metrics/flask_metrics_redis.py index 44bb4fb147..21322433f2 100644 --- a/flask_metrics/flask_metrics_redis.py +++ b/flask_metrics/flask_metrics_redis.py @@ -15,14 +15,14 @@ app = Flask(__name__) INSTANCES = { - 'db-redis-session': settings.REDIS_SESSIONS, - 'db-redis-story': settings.REDIS_STORY, - 'db-redis-pubsub': settings.REDIS_PUBSUB, - 'db-redis-user': settings.REDIS_USER, + "db-redis-session": settings.REDIS_SESSIONS, + "db-redis-story": settings.REDIS_STORY, + "db-redis-pubsub": settings.REDIS_PUBSUB, + "db-redis-user": settings.REDIS_USER, } -class RedisMetric(object): +class RedisMetric(object): def __init__(self, title, fields): self.title = title self.fields = fields @@ -36,17 +36,17 @@ def redis_servers_stats(self): if not settings.DOCKERBUILD and instance not in settings.SERVER_NAME: continue self.host = f"{settings.SERVER_NAME}.node.nyc1.consul" - if instance == 'db-redis-session': - self.port = redis_config.get('port', settings.REDIS_SESSION_PORT) - elif instance == 'db-redis-story': - self.port = redis_config.get('port', 
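
For orientation, every route in flask_metrics_redis.py ultimately renders Prometheus' text exposition format: one `name{label="..."} value` sample per line. Roughly what a scrape of `/active-connections/` would return, with instance names taken from `INSTANCES` above and the counts invented for illustration:

    # Illustration only: the exposition lines that format_data() assembles.
    samples = [
        'redis_active_connections{db="db-redis-session"} 42',
        'redis_active_connections{db="db-redis-story"} 7',
    ]
    print("\n".join(samples))
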
settings.REDIS_STORY_PORT) - elif instance == 'db-redis-pubsub': - self.port = redis_config.get('port', settings.REDIS_PUBSUB_PORT) - elif instance == 'db-redis-user': - self.port = redis_config.get('port', settings.REDIS_USER_PORT) + if instance == "db-redis-session": + self.port = redis_config.get("port", settings.REDIS_SESSION_PORT) + elif instance == "db-redis-story": + self.port = redis_config.get("port", settings.REDIS_STORY_PORT) + elif instance == "db-redis-pubsub": + self.port = redis_config.get("port", settings.REDIS_PUBSUB_PORT) + elif instance == "db-redis-user": + self.port = redis_config.get("port", settings.REDIS_USER_PORT) stats = self.get_info() yield instance, stats - + def execute(self): data = {} for instance, stats in self.redis_servers_stats(): @@ -61,136 +61,154 @@ def execute(self): return data def format_data(self, data): - label = self.fields[0][1]['label'] + label = self.fields[0][1]["label"] formatted_data = {} for k, v in data.items(): formatted_data[k] = f'{label}{{db="{k}"}} {v[self.fields[0][0]]}' return formatted_data - + def get_db_size_data(self): data = {} for instance, stats in self.redis_servers_stats(): - dbs = [stat for stat in stats.keys() if stat.startswith('db')] + dbs = [stat for stat in stats.keys() if stat.startswith("db")] for db in dbs: - data[f'{instance}-{db}'] = f'redis_size{{db="{db}"}} {stats[db]["keys"]}' + data[f"{instance}-{db}"] = f'redis_size{{db="{db}"}} {stats[db]["keys"]}' return data def get_context(self): - if self.fields[0][0] == 'size': + if self.fields[0][0] == "size": formatted_data = self.get_db_size_data() else: values = self.execute() formatted_data = self.format_data(values) context = { "data": formatted_data, - "chart_name": self.fields[0][1]['label'], - "chart_type": self.fields[0][1]['type'], + "chart_name": self.fields[0][1]["label"], + "chart_type": self.fields[0][1]["type"], } return context - + @property def response_body(self): context = self.get_context() - return render_template('prometheus_data.html', **context) + return render_template("prometheus_data.html", **context) @app.route("/active-connections/") def active_connections(): conf = { - 'title': "Redis active connections", - 'fields': ( - ('connected_clients', dict( - label="redis_active_connections", - type="gauge", - )), + "title": "Redis active connections", + "fields": ( + ( + "connected_clients", + dict( + label="redis_active_connections", + type="gauge", + ), + ), ), } redis_metric = RedisMetric(**conf) return Response(redis_metric.response_body, content_type="text/plain") + @app.route("/commands/") def commands(): conf = { - 'title': "Redis commands", - 'fields': ( - ('total_commands_processed', dict( - label="redis_commands", - type="gauge", - )), + "title": "Redis commands", + "fields": ( + ( + "total_commands_processed", + dict( + label="redis_commands", + type="gauge", + ), + ), ), } redis_metric = RedisMetric(**conf) context = redis_metric.get_context() - html_body = render_template('prometheus_data.html', **context) + html_body = render_template("prometheus_data.html", **context) return Response(html_body, content_type="text/plain") @app.route("/connects/") def connects(): conf = { - 'title': "Redis connections per second", - 'fields': ( - ('total_connections_received', dict( - label="redis_connects", - type="counter", - )), + "title": "Redis connections per second", + "fields": ( + ( + "total_connections_received", + dict( + label="redis_connects", + type="counter", + ), + ), ), } redis_metric = RedisMetric(**conf) context = 
redis_metric.get_context()
-    html_body = render_template('prometheus_data.html', **context)
+    html_body = render_template("prometheus_data.html", **context)
     return Response(html_body, content_type="text/plain")
 
 
 @app.route("/size/")
 def size():
     conf = {
-        'title': "Redis DB size",
-        'fields': (
-            ('size', dict(
-                label="redis_size",
-                type="gauge",
-            )),
-        )
+        "title": "Redis DB size",
+        "fields": (
+            (
+                "size",
+                dict(
+                    label="redis_size",
+                    type="gauge",
+                ),
+            ),
+        ),
     }
     redis_metric = RedisMetric(**conf)
     context = redis_metric.get_context()
-    html_body = render_template('prometheus_data.html', **context)
+    html_body = render_template("prometheus_data.html", **context)
     return Response(html_body, content_type="text/plain")
 
 
 @app.route("/memory/")
 def memory():
     conf = {
-        'title': "Redis Total Memory",
-        'fields': (
-            ('total_system_memory', dict(
-                label="redis_memory",
-                type="gauge",
-            )),
+        "title": "Redis Total Memory",
+        "fields": (
+            (
+                "total_system_memory",
+                dict(
+                    label="redis_memory",
+                    type="gauge",
+                ),
+            ),
         ),
     }
     redis_metric = RedisMetric(**conf)
     context = redis_metric.get_context()
-    html_body = render_template('prometheus_data.html', **context)
+    html_body = render_template("prometheus_data.html", **context)
     return Response(html_body, content_type="text/plain")
 
 
 @app.route("/used-memory/")
 def memory_used():
     conf = {
-        'title': "Redis Used Memory",
-        'fields': (
-            ('used_memory', dict(
-                label="redis_used_memory",
-                type="gauge",
-            )),
+        "title": "Redis Used Memory",
+        "fields": (
+            (
+                "used_memory",
+                dict(
+                    label="redis_used_memory",
+                    type="gauge",
+                ),
+            ),
         ),
     }
     redis_metric = RedisMetric(**conf)
     context = redis_metric.get_context()
-    html_body = render_template('prometheus_data.html', **context)
+    html_body = render_template("prometheus_data.html", **context)
     return Response(html_body, content_type="text/plain")
diff --git a/flask_monitor/db_monitor.py b/flask_monitor/db_monitor.py
index eb95dd44b3..b37396bb8a 100644
--- a/flask_monitor/db_monitor.py
+++ b/flask_monitor/db_monitor.py
@@ -22,17 +22,18 @@
 PRIMARY_STATE = 1
 SECONDARY_STATE = 2
 
+
 @app.route("/db_check/postgres")
 def db_check_postgres():
-    if request.args.get('consul') == '1':
+    if request.args.get("consul") == "1":
         return str(1)
 
     connect_params = "dbname='%s' user='%s' password='%s' host='%s' port='%s'" % (
-        settings.DATABASES['default']['NAME'],
-        settings.DATABASES['default']['USER'],
-        settings.DATABASES['default']['PASSWORD'],
-        f'{settings.SERVER_NAME}.node.nyc1.consul',
-        settings.DATABASES['default']['PORT'],
+        settings.DATABASES["default"]["NAME"],
+        settings.DATABASES["default"]["USER"],
+        settings.DATABASES["default"]["PASSWORD"],
+        f"{settings.SERVER_NAME}.node.nyc1.consul",
+        settings.DATABASES["default"]["PORT"],
     )
     try:
         conn = psycopg2.connect(connect_params)
@@ -45,28 +46,30 @@ def db_check_postgres():
         rows = cur.fetchall()
         for row in rows:
             return str(row[0])
-    
+
     abort(Response("No rows found", 504))
 
+
 @app.route("/db_check/mysql")
 def db_check_mysql():
-    if request.args.get('consul') == '1':
+    if request.args.get("consul") == "1":
         return str(1)
 
     connect_params = "dbname='%s' user='%s' password='%s' host='%s' port='%s'" % (
-        settings.DATABASES['default']['NAME'],
-        settings.DATABASES['default']['USER'],
-        settings.DATABASES['default']['PASSWORD'],
-        settings.DATABASES['default']['HOST'],
-        settings.DATABASES['default']['PORT'],
+        settings.DATABASES["default"]["NAME"],
+        settings.DATABASES["default"]["USER"],
+        settings.DATABASES["default"]["PASSWORD"],
+        
settings.DATABASES["default"]["HOST"], + settings.DATABASES["default"]["PORT"], ) try: - - conn = pymysql.connect(host='mysql', - port=settings.DATABASES['default']['PORT'], - user=settings.DATABASES['default']['USER'], - passwd=settings.DATABASES['default']['PASSWORD'], - db=settings.DATABASES['default']['NAME']) + conn = pymysql.connect( + host="mysql", + port=settings.DATABASES["default"]["PORT"], + user=settings.DATABASES["default"]["USER"], + passwd=settings.DATABASES["default"]["PASSWORD"], + db=settings.DATABASES["default"]["NAME"], + ) except: print(" ---> Mysql can't connect to the database: %s" % connect_params) abort(Response("Can't connect to mysql db", 503)) @@ -76,17 +79,20 @@ def db_check_mysql(): rows = cur.fetchall() for row in rows: return str(row[0]) - + abort(Response("No rows found", 504)) + @app.route("/db_check/mongo") def db_check_mongo(): - if request.args.get('consul') == '1': + if request.args.get("consul") == "1": return str(1) try: # The `mongo` hostname below is a reference to the newsblurnet docker network, where 172.18.0.0/16 is defined - client = pymongo.MongoClient(f"mongodb://{settings.MONGO_DB['username']}:{settings.MONGO_DB['password']}@{settings.SERVER_NAME}.node.nyc1.consul/?authSource=admin") + client = pymongo.MongoClient( + f"mongodb://{settings.MONGO_DB['username']}:{settings.MONGO_DB['password']}@{settings.SERVER_NAME}.node.nyc1.consul/?authSource=admin" + ) db = client.newsblur except: abort(Response("Can't connect to db", 503)) @@ -98,25 +104,25 @@ def db_check_mongo(): except pymongo.errors.ServerSelectionTimeoutError: abort(Response("Server selection timeout", 503)) except pymongo.errors.OperationFailure as e: - if 'Authentication failed' in str(e): + if "Authentication failed" in str(e): abort(Response("Auth failed", 506)) abort(Response("Operation Failure", 507)) - + if not stories: abort(Response("No stories", 510)) - - status = client.admin.command('replSetGetStatus') - members = status['members'] + + status = client.admin.command("replSetGetStatus") + members = status["members"] primary_optime = None oldest_secondary_optime = None for member in members: - member_state = member['state'] - optime = member['optime'] + member_state = member["state"] + optime = member["optime"] if member_state == PRIMARY_STATE: - primary_optime = optime['ts'].time + primary_optime = optime["ts"].time elif member_state == SECONDARY_STATE: - if not oldest_secondary_optime or optime['ts'].time < oldest_secondary_optime: - oldest_secondary_optime = optime['ts'].time + if not oldest_secondary_optime or optime["ts"].time < oldest_secondary_optime: + oldest_secondary_optime = optime["ts"].time if not primary_optime or not oldest_secondary_optime: abort(Response("No optime", 511)) @@ -126,43 +132,47 @@ def db_check_mongo(): return str(stories) + @app.route("/db_check/mongo_analytics") def db_check_mongo_analytics(): - if request.args.get('consul') == '1': + if request.args.get("consul") == "1": return str(1) try: - client = pymongo.MongoClient(f"mongodb://{settings.MONGO_ANALYTICS_DB['username']}:{settings.MONGO_ANALYTICS_DB['password']}@{settings.SERVER_NAME}.node.consul/?authSource=admin") + client = pymongo.MongoClient( + f"mongodb://{settings.MONGO_ANALYTICS_DB['username']}:{settings.MONGO_ANALYTICS_DB['password']}@{settings.SERVER_NAME}.node.consul/?authSource=admin" + ) db = client.nbanalytics except: abort(Response("Can't connect to db", 503)) - + try: fetches = db.feed_fetches.estimated_document_count() except (pymongo.errors.NotMasterError, 
pymongo.errors.ServerSelectionTimeoutError): abort(Response("Not Master / Server selection timeout", 504)) except pymongo.errors.OperationFailure as e: - if 'Authentication failed' in str(e): + if "Authentication failed" in str(e): abort(Response("Auth failed", 505)) abort(Response("Operation failure", 506)) - + if not fetches: abort(Response("No fetches in data", 510)) - + return str(fetches) + @app.route("/db_check/redis_user") def db_check_redis_user(): - if request.args.get('consul') == '1': + if request.args.get("consul") == "1": return str(1) - port = request.args.get('port', settings.REDIS_USER_PORT) + port = request.args.get("port", settings.REDIS_USER_PORT) try: - r = redis.Redis(f'{settings.SERVER_NAME}.node.nyc1.consul', port=port, db=0) + r = redis.Redis(f"{settings.SERVER_NAME}.node.nyc1.consul", port=port, db=0) except: abort(Response("Can't connect to db", 503)) - + try: randkey = r.randomkey() except: @@ -173,18 +183,19 @@ def db_check_redis_user(): else: abort(Response("Can't find a randomkey", 505)) + @app.route("/db_check/redis_story") -def db_check_redis_story(): - if request.args.get('consul') == '1': +def db_check_redis_story(): + if request.args.get("consul") == "1": return str(1) - port = request.args.get('port', settings.REDIS_STORY_PORT) - + port = request.args.get("port", settings.REDIS_STORY_PORT) + try: - r = redis.Redis(f'{settings.SERVER_NAME}.node.nyc1.consul', port=port, db=1) + r = redis.Redis(f"{settings.SERVER_NAME}.node.nyc1.consul", port=port, db=1) except: abort(Response("Can't connect to db", 503)) - + try: randkey = r.randomkey() except: @@ -195,18 +206,19 @@ def db_check_redis_story(): else: abort(Response("Can't find a randomkey", 505)) + @app.route("/db_check/redis_sessions") def db_check_redis_sessions(): - if request.args.get('consul') == '1': + if request.args.get("consul") == "1": return str(1) - port = request.args.get('port', settings.REDIS_SESSION_PORT) + port = request.args.get("port", settings.REDIS_SESSION_PORT) try: - r = redis.Redis(f'{settings.SERVER_NAME}.node.nyc1.consul', port=port, db=5) + r = redis.Redis(f"{settings.SERVER_NAME}.node.nyc1.consul", port=port, db=5) except: abort(Response("Can't connect to db", 503)) - + try: randkey = r.randomkey() except: @@ -217,18 +229,19 @@ def db_check_redis_sessions(): else: abort(Response("Can't find a randomkey", 505)) + @app.route("/db_check/redis_pubsub") def db_check_redis_pubsub(): - if request.args.get('consul') == '1': + if request.args.get("consul") == "1": return str(1) - port = request.args.get('port', settings.REDIS_PUBSUB_PORT) + port = request.args.get("port", settings.REDIS_PUBSUB_PORT) try: - r = redis.Redis(f'{settings.SERVER_NAME}.node.nyc1.consul', port=port, db=1) + r = redis.Redis(f"{settings.SERVER_NAME}.node.nyc1.consul", port=port, db=1) except: abort(Response("Can't connect to db", 503)) - + try: pubsub_numpat = r.pubsub_numpat() except: @@ -239,17 +252,18 @@ def db_check_redis_pubsub(): else: abort(Response("Can't find a pubsub_numpat", 505)) + @app.route("/db_check/elasticsearch") def db_check_elasticsearch(): try: conn = elasticsearch.Elasticsearch("elasticsearch") except: abort(Response("Can't connect to db", 503)) - - if request.args.get('consul') == '1': + + if request.args.get("consul") == "1": return str(1) - - if conn.indices.exists('feeds-index'): + + if conn.indices.exists("feeds-index"): return str("Index exists, but didn't try search") # query = pyes.query.TermQuery("title", "daring fireball") # results = conn.search(query=query, size=1, 
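
All of the `db_check_*` routes in db_monitor.py share one contract: `?consul=1` short-circuits to `"1"` for consul's liveness probe, a healthy check returns a meaningful value as the response body, and each failure mode maps to a distinct 5xx status. A condensed, hypothetical restatement of that pattern (`probe_backend` stands in for the real psycopg2/pymongo/redis probes above):

    # Hypothetical route showing the shared db_check shape; not in the patch.
    @app.route("/db_check/example")
    def db_check_example():
        if request.args.get("consul") == "1":
            return str(1)  # consul liveness probe: always healthy
        try:
            value = probe_backend()  # assumed helper, one per backend above
        except Exception:
            abort(Response("Can't connect to db", 503))
        if not value:
            abort(Response("No rows found", 504))
        return str(value)
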
doc_types=['feeds-type'], sort="num_subscribers:desc") @@ -260,6 +274,7 @@ def db_check_elasticsearch(): else: abort(Response("Couldn't find feeds-index", 504)) + if __name__ == "__main__": print(" ---> Starting NewsBlur DB monitor flask server...") app.run(host="0.0.0.0", port=5579) diff --git a/manage.py b/manage.py index 8ff26d71aa..0261a9b88c 100755 --- a/manage.py +++ b/manage.py @@ -8,4 +8,3 @@ from django.core.management import execute_from_command_line execute_from_command_line(sys.argv) - diff --git a/newsblur_web/__init__.py b/newsblur_web/__init__.py index a711f1df8d..3990cb0a88 100644 --- a/newsblur_web/__init__.py +++ b/newsblur_web/__init__.py @@ -4,4 +4,4 @@ # Django starts so that shared_task will use this app. from .celeryapp import app as celery_app -__all__ = ['celery_app'] +__all__ = ["celery_app"] diff --git a/newsblur_web/celeryapp.py b/newsblur_web/celeryapp.py index 146be96a66..e49929029a 100644 --- a/newsblur_web/celeryapp.py +++ b/newsblur_web/celeryapp.py @@ -2,16 +2,17 @@ import os from celery import Celery from django.apps import apps + # set the default Django settings module for the 'celery' program. -os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'newsblur_web.settings') +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "newsblur_web.settings") -app = Celery('newsblur_web') +app = Celery("newsblur_web") # Using a string here means the worker doesn't have to serialize # the configuration object to child processes. # - namespace='CELERY' means all celery-related configuration keys # should have a `CELERY_` prefix. -app.config_from_object('django.conf:settings', namespace="CELERY") +app.config_from_object("django.conf:settings", namespace="CELERY") # Load task modules from all registered Django app configs. app.autodiscover_tasks(lambda: [n.name for n in apps.get_app_configs()]) diff --git a/newsblur_web/docker_local_settings.py b/newsblur_web/docker_local_settings.py index 31e353f543..fca2669808 100644 --- a/newsblur_web/docker_local_settings.py +++ b/newsblur_web/docker_local_settings.py @@ -5,15 +5,13 @@ # = Server Settings = # =================== -ADMINS = ( - ('Samuel Clay', 'samuel@newsblur.com'), -) +ADMINS = (("Samuel Clay", "samuel@newsblur.com"),) -SERVER_EMAIL = 'server@newsblur.com' -HELLO_EMAIL = 'hello@newsblur.com' -NEWSBLUR_URL = 'https://localhost' -PUSH_DOMAIN = 'localhost' -SESSION_COOKIE_DOMAIN = 'localhost' +SERVER_EMAIL = "server@newsblur.com" +HELLO_EMAIL = "hello@newsblur.com" +NEWSBLUR_URL = "https://localhost" +PUSH_DOMAIN = "localhost" +SESSION_COOKIE_DOMAIN = "localhost" # =================== # = Global Settings = @@ -23,24 +21,24 @@ DEBUG = False # DEBUG = True -# DEBUG_ASSETS controls JS/CSS asset packaging. Turning this off requires you to run +# DEBUG_ASSETS controls JS/CSS asset packaging. Turning this off requires you to run # `./manage.py collectstatic` first. Turn this on for development so you can see -# changes in your JS/CSS. +# changes in your JS/CSS. DEBUG_ASSETS = False # Make sure to run `./manage.py collectstatic` first DEBUG_ASSETS = True # DEBUG_QUERIES controls the output of the database query logs. Can be rather verbose -# but is useful to catch slow running queries. A summary is also useful in cutting +# but is useful to catch slow running queries. A summary is also useful in cutting # down verbosity. 
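
As a hedged illustration of what that summary mode condenses (the middleware that consumes these flags, `SQLLogToConsoleMiddleware`, is listed in MIDDLEWARE later in this patch but its body is not shown), a per-request query summary can be derived from Django's built-in query log:

    # Sketch only: when DEBUG is on, Django records executed statements on
    # django.db.connection.queries; summarizing avoids printing each one.
    from django.db import connection

    def query_summary():
        total = sum(float(q["time"]) for q in connection.queries)
        return "%s queries in %.3fs" % (len(connection.queries), total)
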
DEBUG_QUERIES = DEBUG DEBUG_QUERIES_SUMMARY_ONLY = True # DEBUG_QUERIES_SUMMARY_ONLY = False -MEDIA_URL = '/media/' -IMAGES_URL = '/imageproxy' +MEDIA_URL = "/media/" +IMAGES_URL = "/imageproxy" # Uncomment below to debug iOS/Android widget # IMAGES_URL = 'https://haproxy/imageproxy' -SECRET_KEY = 'YOUR SECRET KEY' +SECRET_KEY = "YOUR SECRET KEY" AUTO_PREMIUM_NEW_USERS = True AUTO_PREMIUM_ARCHIVE_NEW_USERS = True AUTO_PREMIUM_PRO_NEW_USERS = True @@ -57,27 +55,27 @@ PRO_MINUTES_BETWEEN_FETCHES = 15 CACHES = { - 'default': { - 'BACKEND': 'django_redis.cache.RedisCache', - 'LOCATION': 'redis://db_redis:6579/6', + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": "redis://db_redis:6579/6", }, } -EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' +EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend" # Set this to the username that is shown on the homepage to unauthenticated users. -HOMEPAGE_USERNAME = 'popular' +HOMEPAGE_USERNAME = "popular" # Google Reader OAuth API Keys -OAUTH_KEY = 'www.example.com' -OAUTH_SECRET = 'SECRET_KEY_FROM_GOOGLE' +OAUTH_KEY = "www.example.com" +OAUTH_SECRET = "SECRET_KEY_FROM_GOOGLE" -S3_ACCESS_KEY = 'XXX' -S3_SECRET = 'SECRET' -S3_BACKUP_BUCKET = 'newsblur-backups' -S3_PAGES_BUCKET_NAME = 'pages-XXX.newsblur.com' -S3_ICONS_BUCKET_NAME = 'icons-XXX.newsblur.com' -S3_AVATARS_BUCKET_NAME = 'avatars-XXX.newsblur.com' +S3_ACCESS_KEY = "XXX" +S3_SECRET = "SECRET" +S3_BACKUP_BUCKET = "newsblur-backups" +S3_PAGES_BUCKET_NAME = "pages-XXX.newsblur.com" +S3_ICONS_BUCKET_NAME = "icons-XXX.newsblur.com" +S3_AVATARS_BUCKET_NAME = "avatars-XXX.newsblur.com" STRIPE_SECRET = "YOUR-SECRET-API-KEY" STRIPE_PUBLISHABLE = "YOUR-PUBLISHABLE-API-KEY" @@ -86,10 +84,10 @@ # = Social APIs = # =============== -FACEBOOK_APP_ID = '111111111111111' -FACEBOOK_SECRET = '99999999999999999999999999999999' -TWITTER_CONSUMER_KEY = 'ooooooooooooooooooooo' -TWITTER_CONSUMER_SECRET = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX' +FACEBOOK_APP_ID = "111111111111111" +FACEBOOK_SECRET = "99999999999999999999999999999999" +TWITTER_CONSUMER_KEY = "ooooooooooooooooooooo" +TWITTER_CONSUMER_SECRET = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" YOUTUBE_API_KEY = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" # ============= @@ -97,51 +95,34 @@ # ============= DATABASES = { - 'default': { - 'NAME': 'newsblur', - 'ENGINE': 'django_prometheus.db.backends.postgresql', + "default": { + "NAME": "newsblur", + "ENGINE": "django_prometheus.db.backends.postgresql", #'ENGINE': 'django.db.backends.mysql', - 'USER': 'newsblur', - 'PASSWORD': 'newsblur', - 'HOST': 'db_postgres', - 'PORT': 5432 + "USER": "newsblur", + "PASSWORD": "newsblur", + "HOST": "db_postgres", + "PORT": 5432, }, } -MONGO_DB = { - 'name': 'newsblur', - 'host': 'db_mongo:29019' -} +MONGO_DB = {"name": "newsblur", "host": "db_mongo:29019"} MONGO_ANALYTICS_DB = { - 'name': 'nbanalytics', - 'host': 'db_mongo:29019', + "name": "nbanalytics", + "host": "db_mongo:29019", } -MONGODB_SLAVE = { - 'host': 'db_mongo' -} +MONGODB_SLAVE = {"host": "db_mongo"} # Celery RabbitMQ/Redis Broker BROKER_URL = "redis://db_redis:6579/0" CELERY_RESULT_BACKEND = BROKER_URL CELERY_WORKER_CONCURRENCY = 1 -REDIS_USER = { - 'host': 'db_redis', - 'port': 6579 -} -REDIS_PUBSUB = { - 'host': 'db_redis', - 'port': 6579 -} -REDIS_STORY = { - 'host': 'db_redis', - 'port': 6579 -} -REDIS_SESSIONS = { - 'host': 'db_redis', - 'port': 6579 -} +REDIS_USER = {"host": "db_redis", "port": 6579} +REDIS_PUBSUB = {"host": "db_redis", "port": 6579} 
+REDIS_STORY = {"host": "db_redis", "port": 6579} +REDIS_SESSIONS = {"host": "db_redis", "port": 6579} CELERY_REDIS_DB_NUM = 4 SESSION_REDIS_DB = 5 @@ -153,9 +134,9 @@ ELASTICSEARCH_STORY_HOST = "http://db_elasticsearch:9200" BACKED_BY_AWS = { - 'pages_on_node': False, - 'pages_on_s3': False, - 'icons_on_s3': False, + "pages_on_node": False, + "pages_on_s3": False, + "icons_on_s3": False, } @@ -167,25 +148,27 @@ LOG_TO_STREAM = True if len(logging._handlerList) < 1: - LOG_FILE = '~/newsblur/logs/development.log' - logging.basicConfig(level=logging.DEBUG, - format='%(asctime)-12s: %(message)s', - datefmt='%b %d %H:%M:%S', - handler=logging.StreamHandler) + LOG_FILE = "~/newsblur/logs/development.log" + logging.basicConfig( + level=logging.DEBUG, + format="%(asctime)-12s: %(message)s", + datefmt="%b %d %H:%M:%S", + handler=logging.StreamHandler, + ) -MAILGUN_ACCESS_KEY = 'key-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx' -MAILGUN_SERVER_NAME = 'newsblur.com' +MAILGUN_ACCESS_KEY = "key-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" +MAILGUN_SERVER_NAME = "newsblur.com" -DO_TOKEN_LOG = '0000000000000000000000000000000000000000000000000000000000000000' -DO_TOKEN_FABRIC = '0000000000000000000000000000000000000000000000000000000000000000' +DO_TOKEN_LOG = "0000000000000000000000000000000000000000000000000000000000000000" +DO_TOKEN_FABRIC = "0000000000000000000000000000000000000000000000000000000000000000" SERVER_NAME = "nblocalhost" NEWSBLUR_URL = os.getenv("NEWSBLUR_URL", "https://localhost") -if NEWSBLUR_URL == 'https://localhost': +if NEWSBLUR_URL == "https://localhost": SESSION_COOKIE_DOMAIN = "localhost" -SESSION_ENGINE = 'redis_sessions.session' +SESSION_ENGINE = "redis_sessions.session" # CORS_ORIGIN_REGEX_WHITELIST = ('^(https?://)?(\w+\.)?nb.local\.com$', ) diff --git a/newsblur_web/settings.py b/newsblur_web/settings.py index 046b597820..35bae73683 100644 --- a/newsblur_web/settings.py +++ b/newsblur_web/settings.py @@ -7,23 +7,23 @@ # = Directory Declaractions = # =========================== -SETTINGS_DIR = os.path.dirname(__file__) -NEWSBLUR_DIR = os.path.join(SETTINGS_DIR, "../") -MEDIA_ROOT = os.path.join(NEWSBLUR_DIR, 'media') -STATIC_ROOT = os.path.join(NEWSBLUR_DIR, 'static') -UTILS_ROOT = os.path.join(NEWSBLUR_DIR, 'utils') -VENDOR_ROOT = os.path.join(NEWSBLUR_DIR, 'vendor') -LOG_FILE = os.path.join(NEWSBLUR_DIR, 'logs/newsblur.log') -IMAGE_MASK = os.path.join(NEWSBLUR_DIR, 'media/img/mask.png') +SETTINGS_DIR = os.path.dirname(__file__) +NEWSBLUR_DIR = os.path.join(SETTINGS_DIR, "../") +MEDIA_ROOT = os.path.join(NEWSBLUR_DIR, "media") +STATIC_ROOT = os.path.join(NEWSBLUR_DIR, "static") +UTILS_ROOT = os.path.join(NEWSBLUR_DIR, "utils") +VENDOR_ROOT = os.path.join(NEWSBLUR_DIR, "vendor") +LOG_FILE = os.path.join(NEWSBLUR_DIR, "logs/newsblur.log") +IMAGE_MASK = os.path.join(NEWSBLUR_DIR, "media/img/mask.png") # ============== # = PYTHONPATH = # ============== -if '/utils' not in ' '.join(sys.path): +if "/utils" not in " ".join(sys.path): sys.path.append(UTILS_ROOT) -if '/vendor' not in ' '.join(sys.path): +if "/vendor" not in " ".join(sys.path): sys.path.append(VENDOR_ROOT) import datetime @@ -47,17 +47,15 @@ # = Server Settings = # =================== -ADMINS = ( - ('Samuel Clay', 'samuel@newsblur.com'), -) +ADMINS = (("Samuel Clay", "samuel@newsblur.com"),) -SERVER_NAME = 'newsblur' -SERVER_EMAIL = 'server@newsblur.com' -HELLO_EMAIL = 'hello@newsblur.com' -NEWSBLUR_URL = 'https://www.newsblur.com' -IMAGES_URL = 'https://imageproxy.newsblur.com' -PUSH_DOMAIN = 'push.newsblur.com' -SECRET_KEY = 
'YOUR_SECRET_KEY' +SERVER_NAME = "newsblur" +SERVER_EMAIL = "server@newsblur.com" +HELLO_EMAIL = "hello@newsblur.com" +NEWSBLUR_URL = "https://www.newsblur.com" +IMAGES_URL = "https://imageproxy.newsblur.com" +PUSH_DOMAIN = "push.newsblur.com" +SECRET_KEY = "YOUR_SECRET_KEY" IMAGES_SECRET_KEY = "YOUR_SECRET_IMAGE_KEY" DNSIMPLE_TOKEN = "YOUR_DNSIMPLE_TOKEN" RECAPTCHA_SECRET_KEY = "YOUR_RECAPTCHA_KEY" @@ -71,40 +69,40 @@ # = Global Settings = # =================== -DEBUG = True -TEST_DEBUG = False +DEBUG = True +TEST_DEBUG = False SEND_BROKEN_LINK_EMAILS = False -DEBUG_QUERIES = False -MANAGERS = ADMINS -PAYPAL_RECEIVER_EMAIL = 'samuel@ofbrooklyn.com' -TIME_ZONE = 'GMT' -LANGUAGE_CODE = 'en-us' -SITE_ID = 1 -USE_I18N = False -LOGIN_REDIRECT_URL = '/' -LOGIN_URL = '/account/login' -MEDIA_URL = '/media/' +DEBUG_QUERIES = False +MANAGERS = ADMINS +PAYPAL_RECEIVER_EMAIL = "samuel@ofbrooklyn.com" +TIME_ZONE = "GMT" +LANGUAGE_CODE = "en-us" +SITE_ID = 1 +USE_I18N = False +LOGIN_REDIRECT_URL = "/" +LOGIN_URL = "/account/login" +MEDIA_URL = "/media/" # URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a # trailing slash. # Examples: "http://foo.com/media/", "/media/". -CIPHER_USERNAMES = False -DEBUG_ASSETS = True -HOMEPAGE_USERNAME = 'popular' -ALLOWED_HOSTS = ['*'] +CIPHER_USERNAMES = False +DEBUG_ASSETS = True +HOMEPAGE_USERNAME = "popular" +ALLOWED_HOSTS = ["*"] AUTO_PREMIUM_NEW_USERS = True AUTO_ENABLE_NEW_USERS = True ENFORCE_SIGNUP_CAPTCHA = False -ENABLE_PUSH = True -PAYPAL_TEST = False -DATA_UPLOAD_MAX_MEMORY_SIZE = 5242880 # 5 MB -FILE_UPLOAD_MAX_MEMORY_SIZE = 5242880 # 5 MB +ENABLE_PUSH = True +PAYPAL_TEST = False +DATA_UPLOAD_MAX_MEMORY_SIZE = 5242880 # 5 MB +FILE_UPLOAD_MAX_MEMORY_SIZE = 5242880 # 5 MB PROMETHEUS_EXPORT_MIGRATIONS = False -MAX_SECONDS_COMPLETE_ARCHIVE_FETCH = 60 * 60 * 1 # 1 hour -MAX_SECONDS_ARCHIVE_FETCH_SINGLE_FEED = 60 * 15 # 15 minutes -MAX_EMAILS_SENT_PER_DAY_PER_USER = 20 # Most are story notifications +MAX_SECONDS_COMPLETE_ARCHIVE_FETCH = 60 * 60 * 1 # 1 hour +MAX_SECONDS_ARCHIVE_FETCH_SINGLE_FEED = 60 * 15 # 15 minutes +MAX_EMAILS_SENT_PER_DAY_PER_USER = 20 # Most are story notifications -# Uncomment below to force all feeds to store this many stories. Default is to cut +# Uncomment below to force all feeds to store this many stories. Default is to cut # off at 25 stories for single subscriber non-premium feeds and 500 for popular feeds. 
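
Purely as an illustration of the retention policy described in the comment above (the `OVERRIDE_STORY_COUNT_MAX` line just below is the optional override), the cutoff amounts to something like the following; this helper is hypothetical, and NewsBlur's actual trim code is not part of this patch:

    # Hypothetical reading of the story-retention comment above.
    def story_cutoff(num_subscribers, override=None):
        if override is not None:
            return override  # OVERRIDE_STORY_COUNT_MAX wins when set
        return 500 if num_subscribers > 1 else 25
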
# OVERRIDE_STORY_COUNT_MAX = 1000 @@ -114,31 +112,31 @@ MIDDLEWARE = ( - 'django_prometheus.middleware.PrometheusBeforeMiddleware', - 'django.middleware.gzip.GZipMiddleware', - 'django.contrib.sessions.middleware.SessionMiddleware', - 'subdomains.middleware.SubdomainMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'apps.profile.middleware.TimingMiddleware', - 'apps.profile.middleware.LastSeenMiddleware', - 'apps.profile.middleware.UserAgentBanMiddleware', - 'corsheaders.middleware.CorsMiddleware', - 'apps.profile.middleware.SimpsonsMiddleware', - 'apps.profile.middleware.ServerHostnameMiddleware', - 'oauth2_provider.middleware.OAuth2TokenMiddleware', + "django_prometheus.middleware.PrometheusBeforeMiddleware", + "django.middleware.gzip.GZipMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "subdomains.middleware.SubdomainMiddleware", + "django.middleware.common.CommonMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "apps.profile.middleware.TimingMiddleware", + "apps.profile.middleware.LastSeenMiddleware", + "apps.profile.middleware.UserAgentBanMiddleware", + "corsheaders.middleware.CorsMiddleware", + "apps.profile.middleware.SimpsonsMiddleware", + "apps.profile.middleware.ServerHostnameMiddleware", + "oauth2_provider.middleware.OAuth2TokenMiddleware", # 'debug_toolbar.middleware.DebugToolbarMiddleware', - 'utils.request_introspection_middleware.DumpRequestMiddleware', - 'apps.profile.middleware.DBProfilerMiddleware', - 'apps.profile.middleware.SQLLogToConsoleMiddleware', - 'utils.redis_raw_log_middleware.RedisDumpMiddleware', - 'django_prometheus.middleware.PrometheusAfterMiddleware', + "utils.request_introspection_middleware.DumpRequestMiddleware", + "apps.profile.middleware.DBProfilerMiddleware", + "apps.profile.middleware.SQLLogToConsoleMiddleware", + "utils.redis_raw_log_middleware.RedisDumpMiddleware", + "django_prometheus.middleware.PrometheusAfterMiddleware", ) AUTHENTICATION_BACKENDS = ( - 'oauth2_provider.backends.OAuth2Backend', - 'django.contrib.auth.backends.ModelBackend', + "oauth2_provider.backends.OAuth2Backend", + "django.contrib.auth.backends.ModelBackend", ) CORS_ORIGIN_ALLOW_ALL = True @@ -146,14 +144,14 @@ CORS_ALLOW_CREDENTIALS = True OAUTH2_PROVIDER = { - 'SCOPES': { - 'read': 'View new unread stories, saved stories, and shared stories.', - 'write': 'Create new saved stories, shared stories, and subscriptions.', - 'ifttt': 'Pair your NewsBlur account with other services.', + "SCOPES": { + "read": "View new unread stories, saved stories, and shared stories.", + "write": "Create new saved stories, shared stories, and subscriptions.", + "ifttt": "Pair your NewsBlur account with other services.", }, - 'CLIENT_ID_GENERATOR_CLASS': 'oauth2_provider.generators.ClientIdGenerator', - 'ACCESS_TOKEN_EXPIRE_SECONDS': 60*60*24*365*10, # 10 years - 'AUTHORIZATION_CODE_EXPIRE_SECONDS': 60*60, # 1 hour + "CLIENT_ID_GENERATOR_CLASS": "oauth2_provider.generators.ClientIdGenerator", + "ACCESS_TOKEN_EXPIRE_SECONDS": 60 * 60 * 24 * 365 * 10, # 10 years + "AUTHORIZATION_CODE_EXPIRE_SECONDS": 60 * 60, # 1 hour } # =========== @@ -161,104 +159,87 @@ # =========== LOGGING = { - 'version': 1, - 'disable_existing_loggers': False, - 'formatters': { - 'verbose': { - 'format': '[%(asctime)-12s] %(message)s', - 'datefmt': '%b %d %H:%M:%S' - }, - 'simple': { - 
'format': '%(message)s' - }, + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "verbose": {"format": "[%(asctime)-12s] %(message)s", "datefmt": "%b %d %H:%M:%S"}, + "simple": {"format": "%(message)s"}, }, - 'handlers': { - 'null': { - 'level':'DEBUG', - 'class':'logging.NullHandler', - }, - 'console':{ - 'level': 'DEBUG', - 'class': 'logging.StreamHandler', - 'formatter': 'verbose' + "handlers": { + "null": { + "level": "DEBUG", + "class": "logging.NullHandler", }, - 'vendor.apns':{ - 'level': 'DEBUG', - 'class': 'logging.StreamHandler', - 'formatter': 'verbose' + "console": {"level": "DEBUG", "class": "logging.StreamHandler", "formatter": "verbose"}, + "vendor.apns": {"level": "DEBUG", "class": "logging.StreamHandler", "formatter": "verbose"}, + "log_file": { + "level": "DEBUG", + "class": "logging.handlers.RotatingFileHandler", + "filename": LOG_FILE, + "maxBytes": 16777216, # 16megabytes + "formatter": "verbose", }, - 'log_file':{ - 'level': 'DEBUG', - 'class': 'logging.handlers.RotatingFileHandler', - 'filename': LOG_FILE, - 'maxBytes': 16777216, # 16megabytes - 'formatter': 'verbose' - }, - 'mail_admins': { - 'level': 'CRITICAL', - 'class': 'django.utils.log.AdminEmailHandler', + "mail_admins": { + "level": "CRITICAL", + "class": "django.utils.log.AdminEmailHandler", # 'filters': ['require_debug_false'], - 'include_html': True, + "include_html": True, }, }, - 'loggers': { - 'django': { - 'handlers': ['console', 'log_file', 'mail_admins'], - 'level': 'ERROR', - 'propagate': False, + "loggers": { + "django": { + "handlers": ["console", "log_file", "mail_admins"], + "level": "ERROR", + "propagate": False, }, - 'django.db.backends': { - 'handlers': ['console'], - 'level': 'INFO', - 'propagate': False, + "django.db.backends": { + "handlers": ["console"], + "level": "INFO", + "propagate": False, }, - 'django.security.DisallowedHost': { - 'handlers': ['null'], - 'propagate': False, + "django.security.DisallowedHost": { + "handlers": ["null"], + "propagate": False, }, - 'elasticsearch': { - 'handlers': ['console', 'log_file'], - 'level': 'ERROR', + "elasticsearch": { + "handlers": ["console", "log_file"], + "level": "ERROR", # 'level': 'DEBUG', - 'propagate': False, + "propagate": False, }, - 'elasticsearch.trace': { - 'handlers': ['console', 'log_file'], - 'level': 'ERROR', + "elasticsearch.trace": { + "handlers": ["console", "log_file"], + "level": "ERROR", # 'level': 'DEBUG', - 'propagate': False, + "propagate": False, }, - 'zebra': { - 'handlers': ['console', 'log_file'], + "zebra": { + "handlers": ["console", "log_file"], # 'level': 'ERROR', - 'level': 'DEBUG', - 'propagate': False, + "level": "DEBUG", + "propagate": False, }, - 'newsblur': { - 'handlers': ['console', 'log_file'], - 'level': 'DEBUG', - 'propagate': False, + "newsblur": { + "handlers": ["console", "log_file"], + "level": "DEBUG", + "propagate": False, }, - 'readability': { - 'handlers': ['console', 'log_file'], - 'level': 'WARNING', - 'propagate': False, + "readability": { + "handlers": ["console", "log_file"], + "level": "WARNING", + "propagate": False, }, - 'apps': { - 'handlers': ['log_file'], - 'level': 'DEBUG', - 'propagate': True, + "apps": { + "handlers": ["log_file"], + "level": "DEBUG", + "propagate": True, + }, + "subdomains.middleware": { + "level": "ERROR", + "propagate": False, }, - 'subdomains.middleware': { - 'level': 'ERROR', - 'propagate': False, - } - }, - 'filters': { - 'require_debug_false': { - '()': 'django.utils.log.RequireDebugFalse' - } }, + "filters": 
{"require_debug_false": {"()": "django.utils.log.RequireDebugFalse"}}, } logging.getLogger("requests").setLevel(logging.WARNING) @@ -268,48 +249,48 @@ # = Miscellaneous Settings = # ========================== -DAYS_OF_UNREAD = 30 -DAYS_OF_UNREAD_FREE = 14 -DAYS_OF_UNREAD_ARCHIVE = 9999 +DAYS_OF_UNREAD = 30 +DAYS_OF_UNREAD_FREE = 14 +DAYS_OF_UNREAD_ARCHIVE = 9999 # DoSH can be more, since you can up this value by N, and after N days, -# you can then up the DAYS_OF_UNREAD value with no impact. +# you can then up the DAYS_OF_UNREAD value with no impact. # The max is for archive subscribers. -DAYS_OF_STORY_HASHES = DAYS_OF_UNREAD +DAYS_OF_STORY_HASHES = DAYS_OF_UNREAD DAYS_OF_STORY_HASHES_ARCHIVE = DAYS_OF_UNREAD_ARCHIVE # SUBSCRIBER_EXPIRE sets the number of days after which a user who hasn't logged in # is no longer considered an active subscriber -SUBSCRIBER_EXPIRE = 7 +SUBSCRIBER_EXPIRE = 7 -# PRO_MINUTES_BETWEEN_FETCHES sets the number of minutes to fetch feeds for +# PRO_MINUTES_BETWEEN_FETCHES sets the number of minutes to fetch feeds for # Premium Pro accounts. Defaults to every 5 minutes, but that's for NewsBlur # servers. On your local, you should probably set this to 10-15 minutes PRO_MINUTES_BETWEEN_FETCHES = 5 -ROOT_URLCONF = 'newsblur_web.urls' -INTERNAL_IPS = ('127.0.0.1',) -LOGGING_LOG_SQL = True -APPEND_SLASH = False -SESSION_ENGINE = 'redis_sessions.session' -TEST_RUNNER = "utils.testrunner.TestRunner" -SESSION_COOKIE_NAME = 'newsblur_sessionid' -SESSION_COOKIE_AGE = 60*60*24*365*10 # 10 years -SESSION_COOKIE_DOMAIN = '.newsblur.com' +ROOT_URLCONF = "newsblur_web.urls" +INTERNAL_IPS = ("127.0.0.1",) +LOGGING_LOG_SQL = True +APPEND_SLASH = False +SESSION_ENGINE = "redis_sessions.session" +TEST_RUNNER = "utils.testrunner.TestRunner" +SESSION_COOKIE_NAME = "newsblur_sessionid" +SESSION_COOKIE_AGE = 60 * 60 * 24 * 365 * 10 # 10 years +SESSION_COOKIE_DOMAIN = ".newsblur.com" SESSION_COOKIE_HTTPONLY = False -SESSION_COOKIE_SECURE = True -SENTRY_DSN = 'https://XXXNEWSBLURXXX@app.getsentry.com/99999999' -SESSION_SERIALIZER = 'django.contrib.sessions.serializers.PickleSerializer' -DATA_UPLOAD_MAX_NUMBER_FIELDS = None # Handle long /reader/complete_river calls -EMAIL_BACKEND = 'anymail.backends.mailgun.EmailBackend' +SESSION_COOKIE_SECURE = True +SENTRY_DSN = "https://XXXNEWSBLURXXX@app.getsentry.com/99999999" +SESSION_SERIALIZER = "django.contrib.sessions.serializers.PickleSerializer" +DATA_UPLOAD_MAX_NUMBER_FIELDS = None # Handle long /reader/complete_river calls +EMAIL_BACKEND = "anymail.backends.mailgun.EmailBackend" # ============== # = Subdomains = # ============== SUBDOMAIN_URLCONFS = { - None: 'newsblur_web.urls', - 'www': 'newsblur_web.urls', - 'nb': 'newsblur_web.urls', + None: "newsblur_web.urls", + "www": "newsblur_web.urls", + "nb": "newsblur_web.urls", } REMOVE_WWW_FROM_DOMAIN = True @@ -324,42 +305,42 @@ # = Django Apps = # =============== -OAUTH2_PROVIDER_APPLICATION_MODEL = 'oauth2_provider.Application' +OAUTH2_PROVIDER_APPLICATION_MODEL = "oauth2_provider.Application" INSTALLED_APPS = ( - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.sites', - 'django.contrib.admin', - 'django.contrib.messages', - 'django.contrib.staticfiles', - 'django_extensions', - 'django_prometheus', - 'paypal.standard.ipn', - 'apps.rss_feeds', - 'apps.reader', - 'apps.analyzer', - 'apps.feed_import', - 'apps.profile', - 'apps.recommendations', - 'apps.statistics', - 'apps.notifications', - 'apps.static', - 'apps.mobile', - 'apps.push', - 
'apps.social', - 'apps.oauth', - 'apps.search', - 'apps.categories', - 'utils', # missing models so no migrations - 'vendor', - 'typogrify', - 'vendor.zebra', - 'anymail', - 'oauth2_provider', - 'corsheaders', - 'pipeline', + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.sites", + "django.contrib.admin", + "django.contrib.messages", + "django.contrib.staticfiles", + "django_extensions", + "django_prometheus", + "paypal.standard.ipn", + "apps.rss_feeds", + "apps.reader", + "apps.analyzer", + "apps.feed_import", + "apps.profile", + "apps.recommendations", + "apps.statistics", + "apps.notifications", + "apps.static", + "apps.mobile", + "apps.push", + "apps.social", + "apps.oauth", + "apps.search", + "apps.categories", + "utils", # missing models so no migrations + "vendor", + "typogrify", + "vendor.zebra", + "anymail", + "oauth2_provider", + "corsheaders", + "pipeline", ) # =================== @@ -378,30 +359,12 @@ # ========== CELERY_TASK_ROUTES = { - "work-queue": { - "queue": "work_queue", - "binding_key": "work_queue" - }, - "new-feeds": { - "queue": "new_feeds", - "binding_key": "new_feeds" - }, - "push-feeds": { - "queue": "push_feeds", - "binding_key": "push_feeds" - }, - "update-feeds": { - "queue": "update_feeds", - "binding_key": "update_feeds" - }, - "beat-tasks": { - "queue": "cron_queue", - "binding_key": "cron_queue" - }, - "search-indexer": { - "queue": "search_indexer", - "binding_key": "search_indexer" - }, + "work-queue": {"queue": "work_queue", "binding_key": "work_queue"}, + "new-feeds": {"queue": "new_feeds", "binding_key": "new_feeds"}, + "push-feeds": {"queue": "push_feeds", "binding_key": "push_feeds"}, + "update-feeds": {"queue": "update_feeds", "binding_key": "update_feeds"}, + "beat-tasks": {"queue": "cron_queue", "binding_key": "cron_queue"}, + "search-indexer": {"queue": "search_indexer", "binding_key": "search_indexer"}, } CELERY_TASK_QUEUES = { "work_queue": { @@ -409,114 +372,100 @@ "exchange_type": "direct", "binding_key": "work_queue", }, - "new_feeds": { - "exchange": "new_feeds", - "exchange_type": "direct", - "binding_key": "new_feeds" - }, - "push_feeds": { - "exchange": "push_feeds", - "exchange_type": "direct", - "binding_key": "push_feeds" - }, - "update_feeds": { - "exchange": "update_feeds", - "exchange_type": "direct", - "binding_key": "update_feeds" - }, - "cron_queue": { - "exchange": "cron_queue", - "exchange_type": "direct", - "binding_key": "cron_queue" - }, + "new_feeds": {"exchange": "new_feeds", "exchange_type": "direct", "binding_key": "new_feeds"}, + "push_feeds": {"exchange": "push_feeds", "exchange_type": "direct", "binding_key": "push_feeds"}, + "update_feeds": {"exchange": "update_feeds", "exchange_type": "direct", "binding_key": "update_feeds"}, + "cron_queue": {"exchange": "cron_queue", "exchange_type": "direct", "binding_key": "cron_queue"}, "beat_feeds_task": { "exchange": "beat_feeds_task", "exchange_type": "direct", - "binding_key": "beat_feeds_task" + "binding_key": "beat_feeds_task", }, "search_indexer": { "exchange": "search_indexer", "exchange_type": "direct", - "binding_key": "search_indexer" + "binding_key": "search_indexer", }, } CELERY_TASK_DEFAULT_QUEUE = "work_queue" CELERY_WORKER_PREFETCH_MULTIPLIER = 1 -CELERY_IMPORTS = ("apps.rss_feeds.tasks", - "apps.social.tasks", - "apps.reader.tasks", - "apps.profile.tasks", - "apps.feed_import.tasks", - "apps.search.tasks", - "apps.statistics.tasks",) -CELERY_TASK_IGNORE_RESULT = True -CELERY_TASK_ACKS_LATE = True # Retry 
if task fails +CELERY_IMPORTS = ( + "apps.rss_feeds.tasks", + "apps.social.tasks", + "apps.reader.tasks", + "apps.profile.tasks", + "apps.feed_import.tasks", + "apps.search.tasks", + "apps.statistics.tasks", +) +CELERY_TASK_IGNORE_RESULT = True +CELERY_TASK_ACKS_LATE = True # Retry if task fails CELERY_WORKER_MAX_TASKS_PER_CHILD = 10 -CELERY_TASK_TIME_LIMIT = 12 * 30 -CELERY_WORKER_DISABLE_RATE_LIMITS = True +CELERY_TASK_TIME_LIMIT = 12 * 30 +CELERY_WORKER_DISABLE_RATE_LIMITS = True SECONDS_TO_DELAY_CELERY_EMAILS = 60 CELERY_BEAT_SCHEDULE = { - 'task-feeds': { - 'task': 'task-feeds', - 'schedule': datetime.timedelta(minutes=1), - 'options': {'queue': 'beat_feeds_task'}, + "task-feeds": { + "task": "task-feeds", + "schedule": datetime.timedelta(minutes=1), + "options": {"queue": "beat_feeds_task"}, }, - 'task-broken-feeds': { - 'task': 'task-broken-feeds', - 'schedule': datetime.timedelta(hours=6), - 'options': {'queue': 'beat_feeds_task'}, + "task-broken-feeds": { + "task": "task-broken-feeds", + "schedule": datetime.timedelta(hours=6), + "options": {"queue": "beat_feeds_task"}, }, - 'freshen-homepage': { - 'task': 'freshen-homepage', - 'schedule': datetime.timedelta(hours=1), - 'options': {'queue': 'cron_queue'}, + "freshen-homepage": { + "task": "freshen-homepage", + "schedule": datetime.timedelta(hours=1), + "options": {"queue": "cron_queue"}, }, - 'collect-stats': { - 'task': 'collect-stats', - 'schedule': datetime.timedelta(minutes=1), - 'options': {'queue': 'cron_queue'}, + "collect-stats": { + "task": "collect-stats", + "schedule": datetime.timedelta(minutes=1), + "options": {"queue": "cron_queue"}, }, - 'collect-feedback': { - 'task': 'collect-feedback', - 'schedule': datetime.timedelta(minutes=1), - 'options': {'queue': 'cron_queue'}, + "collect-feedback": { + "task": "collect-feedback", + "schedule": datetime.timedelta(minutes=1), + "options": {"queue": "cron_queue"}, }, - 'share-popular-stories': { - 'task': 'share-popular-stories', - 'schedule': datetime.timedelta(minutes=10), - 'options': {'queue': 'cron_queue'}, + "share-popular-stories": { + "task": "share-popular-stories", + "schedule": datetime.timedelta(minutes=10), + "options": {"queue": "cron_queue"}, }, - 'clean-analytics': { - 'task': 'clean-analytics', - 'schedule': datetime.timedelta(hours=12), - 'options': {'queue': 'cron_queue', 'timeout': 720*10}, + "clean-analytics": { + "task": "clean-analytics", + "schedule": datetime.timedelta(hours=12), + "options": {"queue": "cron_queue", "timeout": 720 * 10}, }, - 'reimport-stripe-history': { - 'task': 'reimport-stripe-history', - 'schedule': datetime.timedelta(hours=6), - 'options': {'queue': 'cron_queue'}, + "reimport-stripe-history": { + "task": "reimport-stripe-history", + "schedule": datetime.timedelta(hours=6), + "options": {"queue": "cron_queue"}, }, # 'clean-spam': { # 'task': 'clean-spam', # 'schedule': datetime.timedelta(hours=1), # 'options': {'queue': 'cron_queue'}, # }, - 'clean-social-spam': { - 'task': 'clean-social-spam', - 'schedule': datetime.timedelta(hours=6), - 'options': {'queue': 'cron_queue'}, + "clean-social-spam": { + "task": "clean-social-spam", + "schedule": datetime.timedelta(hours=6), + "options": {"queue": "cron_queue"}, }, - 'premium-expire': { - 'task': 'premium-expire', - 'schedule': datetime.timedelta(hours=24), - 'options': {'queue': 'cron_queue'}, + "premium-expire": { + "task": "premium-expire", + "schedule": datetime.timedelta(hours=24), + "options": {"queue": "cron_queue"}, }, - 'activate-next-new-user': { - 'task': 
'activate-next-new-user', - 'schedule': datetime.timedelta(minutes=5), - 'options': {'queue': 'cron_queue'}, + "activate-next-new-user": { + "task": "activate-next-new-user", + "schedule": datetime.timedelta(minutes=5), + "options": {"queue": "cron_queue"}, }, } @@ -528,32 +477,33 @@ else: MONGO_PORT = 27017 MONGO_DB = { - 'host': f'db_mongo:{MONGO_PORT}', - 'name': 'newsblur', + "host": f"db_mongo:{MONGO_PORT}", + "name": "newsblur", } MONGO_ANALYTICS_DB = { - 'host': f'db_mongo_analytics:{MONGO_PORT}', - 'name': 'nbanalytics', + "host": f"db_mongo_analytics:{MONGO_PORT}", + "name": "nbanalytics", } # ==================== # = Database Routers = # ==================== + class MasterSlaveRouter(object): """A router that sets up a simple master/slave configuration""" def db_for_read(self, model, **hints): "Point all read operations to a random slave" - return 'slave' + return "slave" def db_for_write(self, model, **hints): "Point all write operations to the master" - return 'default' + return "default" def allow_relation(self, obj1, obj2, **hints): "Allow any relation between two objects in the db pool" - db_list = ('slave','default') + db_list = ("slave", "default") if obj1._state.db in db_list and obj2._state.db in db_list: return True return None @@ -567,11 +517,11 @@ def allow_migrate(self, db, model): # = Social APIs = # =============== -FACEBOOK_APP_ID = '111111111111111' -FACEBOOK_SECRET = '99999999999999999999999999999999' -FACEBOOK_NAMESPACE = 'newsblur' -TWITTER_CONSUMER_KEY = 'ooooooooooooooooooooo' -TWITTER_CONSUMER_SECRET = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX' +FACEBOOK_APP_ID = "111111111111111" +FACEBOOK_SECRET = "99999999999999999999999999999999" +FACEBOOK_NAMESPACE = "newsblur" +TWITTER_CONSUMER_KEY = "ooooooooooooooooooooo" +TWITTER_CONSUMER_SECRET = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" YOUTUBE_API_KEY = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" # =============== @@ -580,15 +530,15 @@ def allow_migrate(self, db, model): BACKED_BY_AWS = { - 'pages_on_s3': False, - 'icons_on_s3': False, + "pages_on_s3": False, + "icons_on_s3": False, } PROXY_S3_PAGES = True -S3_BACKUP_BUCKET = 'newsblur-backups' -S3_PAGES_BUCKET_NAME = 'pages.newsblur.com' -S3_ICONS_BUCKET_NAME = 'icons.newsblur.com' -S3_AVATARS_BUCKET_NAME = 'avatars.newsblur.com' +S3_BACKUP_BUCKET = "newsblur-backups" +S3_PAGES_BUCKET_NAME = "pages.newsblur.com" +S3_ICONS_BUCKET_NAME = "icons.newsblur.com" +S3_AVATARS_BUCKET_NAME = "avatars.newsblur.com" # ================== # = Configurations = @@ -605,12 +555,14 @@ def allow_migrate(self, db, model): started_task_or_app = False try: from newsblur_web.task_env import * + print(" ---> Starting NewsBlur task server...") started_task_or_app = True except ModuleNotFoundError: pass try: from newsblur_web.app_env import * + print(" ---> Starting NewsBlur app server...") started_task_or_app = True except ModuleNotFoundError: @@ -619,34 +571,29 @@ def allow_migrate(self, db, model): print(" ---> Starting NewsBlur development server...") if DOCKERBUILD: - CELERY_WORKER_CONCURRENCY = 2 + CELERY_WORKER_CONCURRENCY = 2 elif "task-work" in SERVER_NAME or SERVER_NAME.startswith("task-"): - CELERY_WORKER_CONCURRENCY = 4 + CELERY_WORKER_CONCURRENCY = 4 else: - CELERY_WORKER_CONCURRENCY = 24 - -if not DEBUG: - INSTALLED_APPS += ( - 'django_ses', + CELERY_WORKER_CONCURRENCY = 24 - ) +if not DEBUG: + INSTALLED_APPS += ("django_ses",) sentry_sdk.init( dsn=SENTRY_DSN, integrations=[DjangoIntegration(), RedisIntegration(), CeleryIntegration()], server_name=SERVER_NAME, - # Set 
traces_sample_rate to 1.0 to capture 100% # of transactions for performance monitoring. # We recommend adjusting this value in production, traces_sample_rate=0.01, - # If you wish to associate users to errors (assuming you are using # django.contrib.auth) you may enable sending PII data. - send_default_pii=True + send_default_pii=True, ) sentry_sdk.utils.MAX_STRING_LENGTH = 8192 - + COMPRESS = not DEBUG ACCOUNT_ACTIVATION_DAYS = 30 AWS_ACCESS_KEY_ID = S3_ACCESS_KEY @@ -655,10 +602,11 @@ def allow_migrate(self, db, model): os.environ["AWS_ACCESS_KEY_ID"] = AWS_ACCESS_KEY_ID os.environ["AWS_SECRET_ACCESS_KEY"] = AWS_SECRET_ACCESS_KEY + def clear_prometheus_aggregation_stats(): - prom_folder = '/srv/newsblur/.prom_cache' + prom_folder = "/srv/newsblur/.prom_cache" os.makedirs(prom_folder, exist_ok=True) - os.environ['PROMETHEUS_MULTIPROC_DIR'] = prom_folder + os.environ["PROMETHEUS_MULTIPROC_DIR"] = prom_folder for filename in os.listdir(prom_folder): file_path = os.path.join(prom_folder, filename) try: @@ -667,24 +615,27 @@ def clear_prometheus_aggregation_stats(): elif os.path.isdir(file_path): shutil.rmtree(file_path) except Exception as e: - if 'No such file' in str(e): + if "No such file" in str(e): return - print('Failed to delete %s. Reason: %s' % (file_path, e)) + print("Failed to delete %s. Reason: %s" % (file_path, e)) clear_prometheus_aggregation_stats() if DEBUG: template_loaders = [ - 'django.template.loaders.filesystem.Loader', - 'django.template.loaders.app_directories.Loader', + "django.template.loaders.filesystem.Loader", + "django.template.loaders.app_directories.Loader", ] else: template_loaders = [ - ('django.template.loaders.cached.Loader', ( - 'django.template.loaders.filesystem.Loader', - 'django.template.loaders.app_directories.Loader', - )), + ( + "django.template.loaders.cached.Loader", + ( + "django.template.loaders.filesystem.Loader", + "django.template.loaders.app_directories.Loader", + ), + ), ] @@ -692,19 +643,21 @@ def clear_prometheus_aggregation_stats(): TEMPLATES = [ { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [os.path.join(NEWSBLUR_DIR, 'templates'), - os.path.join(NEWSBLUR_DIR, 'vendor/zebra/templates')], + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [ + os.path.join(NEWSBLUR_DIR, "templates"), + os.path.join(NEWSBLUR_DIR, "vendor/zebra/templates"), + ], # 'APP_DIRS': True, - 'OPTIONS': { - 'context_processors': [ + "OPTIONS": { + "context_processors": [ "django.contrib.auth.context_processors.auth", "django.template.context_processors.debug", "django.template.context_processors.media", - 'django.template.context_processors.request', - 'django.contrib.messages.context_processors.messages', + "django.template.context_processors.request", + "django.contrib.messages.context_processors.messages", ], - 'loaders': template_loaders, + "loaders": template_loaders, }, } ] @@ -726,14 +679,14 @@ def clear_prometheus_aggregation_stats(): monitoring.register(MONGO_COMMAND_LOGGER) MONGO_DB_DEFAULTS = { - 'name': 'newsblur', - 'host': f'db_mongo:{MONGO_PORT}', - 'alias': 'default', - 'unicode_decode_error_handler': 'ignore', - 'connect': False, + "name": "newsblur", + "host": f"db_mongo:{MONGO_PORT}", + "alias": "default", + "unicode_decode_error_handler": "ignore", + "connect": False, } MONGO_DB = dict(MONGO_DB_DEFAULTS, **MONGO_DB) -MONGO_DB_NAME = MONGO_DB.pop('name') +MONGO_DB_NAME = MONGO_DB.pop("name") # MONGO_URI = 'mongodb://%s' % (MONGO_DB.pop('host'),) # if MONGO_DB.get('read_preference', 
pymongo.ReadPreference.PRIMARY) != pymongo.ReadPreference.PRIMARY: @@ -747,18 +700,24 @@ def clear_prometheus_aggregation_stats(): # MONGODB = connect(host="mongodb://localhost:27017/newsblur", connect=False) MONGO_ANALYTICS_DB_DEFAULTS = { - 'name': 'nbanalytics', - 'host': f'db_mongo_analytics:{MONGO_PORT}', - 'alias': 'nbanalytics', + "name": "nbanalytics", + "host": f"db_mongo_analytics:{MONGO_PORT}", + "alias": "nbanalytics", } MONGO_ANALYTICS_DB = dict(MONGO_ANALYTICS_DB_DEFAULTS, **MONGO_ANALYTICS_DB) # MONGO_ANALYTICS_DB_NAME = MONGO_ANALYTICS_DB.pop('name') # MONGOANALYTICSDB = connect(MONGO_ANALYTICS_DB_NAME, **MONGO_ANALYTICS_DB) -if 'username' in MONGO_ANALYTICS_DB: - MONGOANALYTICSDB = connect(db=MONGO_ANALYTICS_DB['name'], host=f"mongodb://{MONGO_ANALYTICS_DB['username']}:{MONGO_ANALYTICS_DB['password']}@{MONGO_ANALYTICS_DB['host']}/?authSource=admin", alias="nbanalytics") +if "username" in MONGO_ANALYTICS_DB: + MONGOANALYTICSDB = connect( + db=MONGO_ANALYTICS_DB["name"], + host=f"mongodb://{MONGO_ANALYTICS_DB['username']}:{MONGO_ANALYTICS_DB['password']}@{MONGO_ANALYTICS_DB['host']}/?authSource=admin", + alias="nbanalytics", + ) else: - MONGOANALYTICSDB = connect(db=MONGO_ANALYTICS_DB['name'], host=f"mongodb://{MONGO_ANALYTICS_DB['host']}/", alias="nbanalytics") + MONGOANALYTICSDB = connect( + db=MONGO_ANALYTICS_DB["name"], host=f"mongodb://{MONGO_ANALYTICS_DB['host']}/", alias="nbanalytics" + ) # ========= @@ -777,149 +736,164 @@ def clear_prometheus_aggregation_stats(): REDIS_PUBSUB_PORT = 6383 if REDIS_USER is None: - # REDIS has been renamed to REDIS_USER. + # REDIS has been renamed to REDIS_USER. REDIS_USER = REDIS CELERY_REDIS_DB_NUM = 4 SESSION_REDIS_DB = 5 -CELERY_BROKER_URL = "redis://%s:%s/%s" % (REDIS_USER['host'], REDIS_USER_PORT,CELERY_REDIS_DB_NUM) +CELERY_BROKER_URL = "redis://%s:%s/%s" % (REDIS_USER["host"], REDIS_USER_PORT, CELERY_REDIS_DB_NUM) CELERY_RESULT_BACKEND = CELERY_BROKER_URL -BROKER_TRANSPORT_OPTIONS = { - "max_retries": 3, - "interval_start": 0, - "interval_step": 0.2, - "interval_max": 0.5 -} +BROKER_TRANSPORT_OPTIONS = {"max_retries": 3, "interval_start": 0, "interval_step": 0.2, "interval_max": 0.5} SESSION_REDIS = { - 'host': REDIS_SESSIONS['host'], - 'port': REDIS_SESSION_PORT, - 'db': SESSION_REDIS_DB, + "host": REDIS_SESSIONS["host"], + "port": REDIS_SESSION_PORT, + "db": SESSION_REDIS_DB, # 'password': 'password', - 'prefix': '', - 'socket_timeout': 10, - 'retry_on_timeout': True + "prefix": "", + "socket_timeout": 10, + "retry_on_timeout": True, } CACHES = { - 'default': { - 'BACKEND': 'django_redis.cache.RedisCache', - 'LOCATION': 'redis://%s:%s/6' % (REDIS_USER['host'], REDIS_USER_PORT), + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": "redis://%s:%s/6" % (REDIS_USER["host"], REDIS_USER_PORT), }, } -REDIS_POOL = redis.ConnectionPool(host=REDIS_USER['host'], port=REDIS_USER_PORT, db=0, decode_responses=True) -REDIS_ANALYTICS_POOL = redis.ConnectionPool(host=REDIS_USER['host'], port=REDIS_USER_PORT, db=2, decode_responses=True) -REDIS_STATISTICS_POOL = redis.ConnectionPool(host=REDIS_USER['host'], port=REDIS_USER_PORT, db=3, decode_responses=True) -REDIS_FEED_UPDATE_POOL = redis.ConnectionPool(host=REDIS_USER['host'], port=REDIS_USER_PORT, db=4, decode_responses=True) -REDIS_STORY_HASH_TEMP_POOL = redis.ConnectionPool(host=REDIS_USER['host'], port=REDIS_USER_PORT, db=10, decode_responses=True) +REDIS_POOL = redis.ConnectionPool(host=REDIS_USER["host"], port=REDIS_USER_PORT, db=0, decode_responses=True) 
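A minimal sketch of how one of these pools is consumed (illustrative only — the settings access and key names below are the editor's, not part of this patch; binding a client to an existing pool is the standard redis-py pattern):

import redis
from django.conf import settings

# Reuse sockets from the shared pool instead of opening one per client.
r = redis.Redis(connection_pool=settings.REDIS_POOL)
r.set("spam", "eggs")   # lands in db=0 on the REDIS_USER host
print(r.get("spam"))    # "eggs" -- decode_responses=True returns str, not bytes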
+REDIS_ANALYTICS_POOL = redis.ConnectionPool( + host=REDIS_USER["host"], port=REDIS_USER_PORT, db=2, decode_responses=True +) +REDIS_STATISTICS_POOL = redis.ConnectionPool( + host=REDIS_USER["host"], port=REDIS_USER_PORT, db=3, decode_responses=True +) +REDIS_FEED_UPDATE_POOL = redis.ConnectionPool( + host=REDIS_USER["host"], port=REDIS_USER_PORT, db=4, decode_responses=True +) +REDIS_STORY_HASH_TEMP_POOL = redis.ConnectionPool( + host=REDIS_USER["host"], port=REDIS_USER_PORT, db=10, decode_responses=True +) # REDIS_CACHE_POOL = redis.ConnectionPool(host=REDIS_USER['host'], port=REDIS_USER_PORT, db=6) # Duped in CACHES -REDIS_STORY_HASH_POOL = redis.ConnectionPool(host=REDIS_STORY['host'], port=REDIS_STORY_PORT, db=1, decode_responses=True) -REDIS_FEED_READ_POOL = redis.ConnectionPool(host=REDIS_SESSIONS['host'], port=REDIS_SESSION_PORT, db=1, decode_responses=True) -REDIS_FEED_SUB_POOL = redis.ConnectionPool(host=REDIS_SESSIONS['host'], port=REDIS_SESSION_PORT, db=2, decode_responses=True) -REDIS_SESSION_POOL = redis.ConnectionPool(host=REDIS_SESSIONS['host'], port=REDIS_SESSION_PORT, db=5, decode_responses=True) -REDIS_PUBSUB_POOL = redis.ConnectionPool(host=REDIS_PUBSUB['host'], port=REDIS_PUBSUB_PORT, db=0, decode_responses=True) +REDIS_STORY_HASH_POOL = redis.ConnectionPool( + host=REDIS_STORY["host"], port=REDIS_STORY_PORT, db=1, decode_responses=True +) +REDIS_FEED_READ_POOL = redis.ConnectionPool( + host=REDIS_SESSIONS["host"], port=REDIS_SESSION_PORT, db=1, decode_responses=True +) +REDIS_FEED_SUB_POOL = redis.ConnectionPool( + host=REDIS_SESSIONS["host"], port=REDIS_SESSION_PORT, db=2, decode_responses=True +) +REDIS_SESSION_POOL = redis.ConnectionPool( + host=REDIS_SESSIONS["host"], port=REDIS_SESSION_PORT, db=5, decode_responses=True +) +REDIS_PUBSUB_POOL = redis.ConnectionPool( + host=REDIS_PUBSUB["host"], port=REDIS_PUBSUB_PORT, db=0, decode_responses=True +) # ========== # = Celery = # ========== # celeryapp.autodiscover_tasks(INSTALLED_APPS) -accept_content = ['pickle', 'json', 'msgpack', 'yaml'] +accept_content = ["pickle", "json", "msgpack", "yaml"] # ========== # = Assets = # ========== -STATIC_URL = '/static/' +STATIC_URL = "/static/" # STATICFILES_STORAGE = 'pipeline.storage.PipelineManifestStorage' -STATICFILES_STORAGE = 'utils.pipeline_utils.PipelineStorage' +STATICFILES_STORAGE = "utils.pipeline_utils.PipelineStorage" # STATICFILES_STORAGE = 'utils.pipeline_utils.GzipPipelineStorage' STATICFILES_FINDERS = ( # 'pipeline.finders.FileSystemFinder', # 'django.contrib.staticfiles.finders.FileSystemFinder', # 'django.contrib.staticfiles.finders.AppDirectoriesFinder', # 'pipeline.finders.AppDirectoriesFinder', - 'utils.pipeline_utils.AppDirectoriesFinder', - 'utils.pipeline_utils.FileSystemFinder', + "utils.pipeline_utils.AppDirectoriesFinder", + "utils.pipeline_utils.FileSystemFinder", # 'pipeline.finders.PipelineFinder', ) STATICFILES_DIRS = [ # '/usr/local/lib/python3.9/site-packages/django/contrib/admin/static/', MEDIA_ROOT, ] -with open(os.path.join(SETTINGS_DIR, 'assets.yml')) as stream: +with open(os.path.join(SETTINGS_DIR, "assets.yml")) as stream: assets = yaml.safe_load(stream) PIPELINE = { - 'PIPELINE_ENABLED': not DEBUG_ASSETS, - 'PIPELINE_COLLECTOR_ENABLED': not DEBUG_ASSETS, - 'SHOW_ERRORS_INLINE': DEBUG_ASSETS, - 'CSS_COMPRESSOR': 'pipeline.compressors.yuglify.YuglifyCompressor', - 'JS_COMPRESSOR': 'pipeline.compressors.closure.ClosureCompressor', + "PIPELINE_ENABLED": not DEBUG_ASSETS, + "PIPELINE_COLLECTOR_ENABLED": not DEBUG_ASSETS, + 
"SHOW_ERRORS_INLINE": DEBUG_ASSETS, + "CSS_COMPRESSOR": "pipeline.compressors.yuglify.YuglifyCompressor", + "JS_COMPRESSOR": "pipeline.compressors.closure.ClosureCompressor", # 'CSS_COMPRESSOR': 'pipeline.compressors.NoopCompressor', # 'JS_COMPRESSOR': 'pipeline.compressors.NoopCompressor', - 'CLOSURE_BINARY': '/usr/bin/java -jar /usr/local/bin/compiler.jar', - 'CLOSURE_ARGUMENTS': '--language_in ECMASCRIPT_2016 --language_out ECMASCRIPT_2016 --warning_level DEFAULT', - 'JAVASCRIPT': { - 'common': { - 'source_filenames': assets['javascripts']['common'], - 'output_filename': 'js/common.js', + "CLOSURE_BINARY": "/usr/bin/java -jar /usr/local/bin/compiler.jar", + "CLOSURE_ARGUMENTS": "--language_in ECMASCRIPT_2016 --language_out ECMASCRIPT_2016 --warning_level DEFAULT", + "JAVASCRIPT": { + "common": { + "source_filenames": assets["javascripts"]["common"], + "output_filename": "js/common.js", }, - 'statistics': { - 'source_filenames': assets['javascripts']['statistics'], - 'output_filename': 'js/statistics.js', + "statistics": { + "source_filenames": assets["javascripts"]["statistics"], + "output_filename": "js/statistics.js", }, - 'payments': { - 'source_filenames': assets['javascripts']['payments'], - 'output_filename': 'js/payments.js', + "payments": { + "source_filenames": assets["javascripts"]["payments"], + "output_filename": "js/payments.js", }, - 'bookmarklet': { - 'source_filenames': assets['javascripts']['bookmarklet'], - 'output_filename': 'js/bookmarklet.js', + "bookmarklet": { + "source_filenames": assets["javascripts"]["bookmarklet"], + "output_filename": "js/bookmarklet.js", }, - 'blurblog': { - 'source_filenames': assets['javascripts']['blurblog'], - 'output_filename': 'js/blurblog.js', + "blurblog": { + "source_filenames": assets["javascripts"]["blurblog"], + "output_filename": "js/blurblog.js", }, }, - 'STYLESHEETS': { - 'common': { - 'source_filenames': assets['stylesheets']['common'], - 'output_filename': 'css/common.css', + "STYLESHEETS": { + "common": { + "source_filenames": assets["stylesheets"]["common"], + "output_filename": "css/common.css", # 'variant': 'datauri', }, - 'bookmarklet': { - 'source_filenames': assets['stylesheets']['bookmarklet'], - 'output_filename': 'css/bookmarklet.css', + "bookmarklet": { + "source_filenames": assets["stylesheets"]["bookmarklet"], + "output_filename": "css/bookmarklet.css", # 'variant': 'datauri', }, - 'blurblog': { - 'source_filenames': assets['stylesheets']['blurblog'], - 'output_filename': 'css/blurblog.css', + "blurblog": { + "source_filenames": assets["stylesheets"]["blurblog"], + "output_filename": "css/blurblog.css", # 'variant': 'datauri', }, - } + }, } -paypalrestsdk.configure({ - "mode": "sandbox" if DEBUG else "live", - "client_id": PAYPAL_API_CLIENTID, - "client_secret": PAYPAL_API_SECRET -}) +paypalrestsdk.configure( + { + "mode": "sandbox" if DEBUG else "live", + "client_id": PAYPAL_API_CLIENTID, + "client_secret": PAYPAL_API_SECRET, + } +) # ======= # = AWS = # ======= S3_CONN = None -if BACKED_BY_AWS.get('pages_on_s3') or BACKED_BY_AWS.get('icons_on_s3'): +if BACKED_BY_AWS.get("pages_on_s3") or BACKED_BY_AWS.get("icons_on_s3"): boto_session = boto3.Session( aws_access_key_id=S3_ACCESS_KEY, aws_secret_access_key=S3_SECRET, ) - S3_CONN = boto_session.resource('s3') + S3_CONN = boto_session.resource("s3") django.http.request.host_validation_re = re.compile(r"^([a-z0-9.-_\-]+|\[[a-f0-9]*:[a-f0-9:]+\])(:\d+)?$") @@ -940,6 +914,7 @@ def monkey_patched_get_user(request): and when this monkey patch is removed. 
""" from django.contrib.auth.models import AnonymousUser + user = None try: user_id = auth._get_user_session_key(request) @@ -951,7 +926,11 @@ def monkey_patched_get_user(request): backend = auth.load_backend(backend_path) user = backend.get_user(user_id) session_hash = request.session.get(auth.HASH_SESSION_KEY) - logging.debug(request, " ---> Ignoring session hash: %s vs %s" % (user.get_session_auth_hash() if user else "[no user]", session_hash)) + logging.debug( + request, + " ---> Ignoring session hash: %s vs %s" + % (user.get_session_auth_hash() if user else "[no user]", session_hash), + ) # # Verify the session # if hasattr(user, 'get_session_auth_hash'): # session_hash = request.session.get(HASH_SESSION_KEY) @@ -965,4 +944,5 @@ def monkey_patched_get_user(request): return user or AnonymousUser() + auth.get_user = monkey_patched_get_user diff --git a/newsblur_web/sitecustomize.py b/newsblur_web/sitecustomize.py index 80ae27febd..0fb429ec57 100644 --- a/newsblur_web/sitecustomize.py +++ b/newsblur_web/sitecustomize.py @@ -1,7 +1,8 @@ import sys -sys.setdefaultencoding('utf-8') + +sys.setdefaultencoding("utf-8") import os -os.putenv('LANG', 'en_US.UTF-8') -os.putenv('LC_ALL', 'en_US.UTF-8') +os.putenv("LANG", "en_US.UTF-8") +os.putenv("LC_ALL", "en_US.UTF-8") diff --git a/newsblur_web/test_settings.py b/newsblur_web/test_settings.py index bdc58a4490..a4480bda0b 100644 --- a/newsblur_web/test_settings.py +++ b/newsblur_web/test_settings.py @@ -1,13 +1,15 @@ import os + DOCKERBUILD = os.getenv("DOCKERBUILD") from newsblur_web.settings import * -DATABASES['default']['ENGINE'] = 'django.db.backends.sqlite3' -DATABASES['default']['OPTIONS'] = {} -DATABASES['default']['NAME'] = 'nb.db' -DATABASES['default']['TEST_NAME'] = os.path.join(BASE_DIR, 'db.sqlite3.test') + +DATABASES["default"]["ENGINE"] = "django.db.backends.sqlite3" +DATABASES["default"]["OPTIONS"] = {} +DATABASES["default"]["NAME"] = "nb.db" +DATABASES["default"]["TEST_NAME"] = os.path.join(BASE_DIR, "db.sqlite3.test") -#DATABASES['default'] = { +# DATABASES['default'] = { # 'NAME': 'newslur', # 'ENGINE': 'django.db.backends.postgresql_psycopg2', # 'USER': 'newsblur', @@ -29,19 +31,19 @@ if DOCKERBUILD: MONGO_PORT = 29019 MONGO_DB = { - 'name': 'newsblur_test', - 'host': 'db_mongo:29019', + "name": "newsblur_test", + "host": "db_mongo:29019", } else: MONGO_PORT = 27017 MONGO_DB = { - 'name': 'newsblur_test', - 'host': '127.0.0.1:27017', + "name": "newsblur_test", + "host": "127.0.0.1:27017", } SERVER_NAME -MONGO_DATABASE_NAME = 'test_newsblur' +MONGO_DATABASE_NAME = "test_newsblur" SOUTH_TESTS_MIGRATE = False DAYS_OF_UNREAD = 9999 @@ -50,5 +52,5 @@ DEBUG = True SITE_ID = 2 SENTRY_DSN = None -HOMEPAGE_USERNAME = 'conesus' -SERVER_NAME = 'test_newsblur' +HOMEPAGE_USERNAME = "conesus" +SERVER_NAME = "test_newsblur" diff --git a/newsblur_web/urls.py b/newsblur_web/urls.py index 3520243b8e..dd5950fe67 100644 --- a/newsblur_web/urls.py +++ b/newsblur_web/urls.py @@ -11,73 +11,79 @@ admin.autodiscover() urlpatterns = [ - url(r'^$', reader_views.index, name='index'), - url(r'^reader/', include('apps.reader.urls')), - url(r'^add/?', reader_views.index), - url(r'^try/?', reader_views.index), - url(r'^site/(?P\d+)?', reader_views.index), - url(r'^folder/(?P\d+)?', reader_views.index, name='folder'), - url(r'^saved/(?P\d+)?', reader_views.index, name='saved-stories-tag'), - url(r'^saved/?', reader_views.index), - url(r'^read/?', reader_views.index), - url(r'^social/\d+/.*?', reader_views.index), - url(r'^user/.*?', reader_views.index), - 
url(r'^null/.*?', reader_views.index), - url(r'^story/.*?', reader_views.index), - url(r'^feed/?', social_views.shared_stories_rss_feed_noid), - url(r'^rss_feeds/', include('apps.rss_feeds.urls')), - url(r'^analyzer/', include('apps.analyzer.urls')), - url(r'^classifier/', include('apps.analyzer.urls')), - url(r'^folder_rss/', include('apps.profile.urls')), - url(r'^profile/', include('apps.profile.urls')), - url(r'^import/', include('apps.feed_import.urls')), - url(r'^api/', include('apps.api.urls')), - url(r'^recommendations/', include('apps.recommendations.urls')), - url(r'^notifications/?', include('apps.notifications.urls')), - url(r'^statistics/', include('apps.statistics.urls')), - url(r'^social/', include('apps.social.urls')), - url(r'^search/', include('apps.search.urls')), - url(r'^oauth/', include('apps.oauth.urls')), - url(r'^mobile/', include('apps.mobile.urls')), - url(r'^m/', include('apps.mobile.urls')), - url(r'^push/', include('apps.push.urls')), - url(r'^newsletters/', include('apps.newsletters.urls')), - url(r'^categories/', include('apps.categories.urls')), - url(r'^_haproxychk', static_views.haproxy_check), - url(r'^_dbcheck/postgres', static_views.postgres_check), - url(r'^_dbcheck/mongo', static_views.mongo_check), - url(r'^_dbcheck/redis', static_views.redis_check), - url(r'^_dbcheck/elasticsearch', static_views.elasticsearch_check), - url(r'^admin/', admin.site.urls), - url(r'^about/?', static_views.about, name='about'), - url(r'^faq/?', static_views.faq, name='faq'), - url(r'^api/?$', static_views.api, name='api'), - url(r'^press/?', static_views.press, name='press'), - url(r'^feedback/?', static_views.feedback, name='feedback'), - url(r'^privacy/?', static_views.privacy, name='privacy'), - url(r'^tos/?', static_views.tos, name='tos'), - url(r'^manifest.webmanifest', static_views.webmanifest, name='webmanifest'), - url(r'^.well-known/apple-app-site-association', static_views.apple_app_site_assoc, name='apple-app-site-assoc'), - url(r'^.well-known/apple-developer-merchantid-domain-association', static_views.apple_developer_merchantid, name='apple-developer-merchantid'), - url(r'^ios/download/?', static_views.ios_download, name='ios-download'), - url(r'^ios/NewsBlur.plist', static_views.ios_plist, name='ios-download-plist'), - url(r'^ios/NewsBlur.ipa', static_views.ios_ipa, name='ios-download-ipa'), - url(r'^ios/?', static_views.ios, name='ios-static'), - url(r'^iphone/?', static_views.ios), - url(r'^ipad/?', static_views.ios), - url(r'^android/?', static_views.android, name='android-static'), - url(r'^firefox/?', static_views.firefox, name='firefox'), - url(r'zebra/', include('zebra.urls', namespace="zebra")), - url(r'^account/redeem_code/?$', profile_views.redeem_code, name='redeem-code'), - url(r'^account/login/?$', profile_views.login, name='login'), - url(r'^account/signup/?$', profile_views.signup, name='signup'), - url(r'^account/logout/?$', - LogoutView, - {'next_page': '/'}, name='logout'), - url(r'^account/ifttt/v1/', include('apps.oauth.urls')), - url(r'^account/', include('oauth2_provider.urls', namespace='oauth2_provider')), - url(r'^monitor/', include('apps.monitor.urls'), name="monitor"), - url('', include('django_prometheus.urls')), + url(r"^$", reader_views.index, name="index"), + url(r"^reader/", include("apps.reader.urls")), + url(r"^add/?", reader_views.index), + url(r"^try/?", reader_views.index), + url(r"^site/(?P\d+)?", reader_views.index), + url(r"^folder/(?P\d+)?", reader_views.index, name="folder"), + url(r"^saved/(?P\d+)?", 
reader_views.index, name="saved-stories-tag"), + url(r"^saved/?", reader_views.index), + url(r"^read/?", reader_views.index), + url(r"^social/\d+/.*?", reader_views.index), + url(r"^user/.*?", reader_views.index), + url(r"^null/.*?", reader_views.index), + url(r"^story/.*?", reader_views.index), + url(r"^feed/?", social_views.shared_stories_rss_feed_noid), + url(r"^rss_feeds/", include("apps.rss_feeds.urls")), + url(r"^analyzer/", include("apps.analyzer.urls")), + url(r"^classifier/", include("apps.analyzer.urls")), + url(r"^folder_rss/", include("apps.profile.urls")), + url(r"^profile/", include("apps.profile.urls")), + url(r"^import/", include("apps.feed_import.urls")), + url(r"^api/", include("apps.api.urls")), + url(r"^recommendations/", include("apps.recommendations.urls")), + url(r"^notifications/?", include("apps.notifications.urls")), + url(r"^statistics/", include("apps.statistics.urls")), + url(r"^social/", include("apps.social.urls")), + url(r"^search/", include("apps.search.urls")), + url(r"^oauth/", include("apps.oauth.urls")), + url(r"^mobile/", include("apps.mobile.urls")), + url(r"^m/", include("apps.mobile.urls")), + url(r"^push/", include("apps.push.urls")), + url(r"^newsletters/", include("apps.newsletters.urls")), + url(r"^categories/", include("apps.categories.urls")), + url(r"^_haproxychk", static_views.haproxy_check), + url(r"^_dbcheck/postgres", static_views.postgres_check), + url(r"^_dbcheck/mongo", static_views.mongo_check), + url(r"^_dbcheck/redis", static_views.redis_check), + url(r"^_dbcheck/elasticsearch", static_views.elasticsearch_check), + url(r"^admin/", admin.site.urls), + url(r"^about/?", static_views.about, name="about"), + url(r"^faq/?", static_views.faq, name="faq"), + url(r"^api/?$", static_views.api, name="api"), + url(r"^press/?", static_views.press, name="press"), + url(r"^feedback/?", static_views.feedback, name="feedback"), + url(r"^privacy/?", static_views.privacy, name="privacy"), + url(r"^tos/?", static_views.tos, name="tos"), + url(r"^manifest.webmanifest", static_views.webmanifest, name="webmanifest"), + url( + r"^.well-known/apple-app-site-association", + static_views.apple_app_site_assoc, + name="apple-app-site-assoc", + ), + url( + r"^.well-known/apple-developer-merchantid-domain-association", + static_views.apple_developer_merchantid, + name="apple-developer-merchantid", + ), + url(r"^ios/download/?", static_views.ios_download, name="ios-download"), + url(r"^ios/NewsBlur.plist", static_views.ios_plist, name="ios-download-plist"), + url(r"^ios/NewsBlur.ipa", static_views.ios_ipa, name="ios-download-ipa"), + url(r"^ios/?", static_views.ios, name="ios-static"), + url(r"^iphone/?", static_views.ios), + url(r"^ipad/?", static_views.ios), + url(r"^android/?", static_views.android, name="android-static"), + url(r"^firefox/?", static_views.firefox, name="firefox"), + url(r"zebra/", include("zebra.urls", namespace="zebra")), + url(r"^account/redeem_code/?$", profile_views.redeem_code, name="redeem-code"), + url(r"^account/login/?$", profile_views.login, name="login"), + url(r"^account/signup/?$", profile_views.signup, name="signup"), + url(r"^account/logout/?$", LogoutView, {"next_page": "/"}, name="logout"), + url(r"^account/ifttt/v1/", include("apps.oauth.urls")), + url(r"^account/", include("oauth2_provider.urls", namespace="oauth2_provider")), + url(r"^monitor/", include("apps.monitor.urls"), name="monitor"), + url("", include("django_prometheus.urls")), ] if settings.DEBUG: diff --git a/newsblur_web/wsgi.py b/newsblur_web/wsgi.py index 
cfbfbc22fe..2f071b032b 100644 --- a/newsblur_web/wsgi.py +++ b/newsblur_web/wsgi.py @@ -6,7 +6,9 @@ """ import os + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "newsblur_web.settings") from django.core.wsgi import get_wsgi_application -application = get_wsgi_application() \ No newline at end of file + +application = get_wsgi_application() diff --git a/perf/locust.py b/perf/locust.py index 38587d8efa..15d748547e 100644 --- a/perf/locust.py +++ b/perf/locust.py @@ -3,6 +3,7 @@ import os import requests + class NB_PerfTest(HttpUser): wait_time = between(1, 2.5) diff --git a/utils/PyRSS2Gen.py b/utils/PyRSS2Gen.py index 8a4ff827e6..19bb11cd5b 100644 --- a/utils/PyRSS2Gen.py +++ b/utils/PyRSS2Gen.py @@ -8,16 +8,18 @@ import datetime + # Could make this the base class; will need to add 'publish' class WriteXmlMixin: - def write_xml(self, outfile, encoding = "iso-8859-1"): + def write_xml(self, outfile, encoding="iso-8859-1"): from xml.sax import saxutils + handler = saxutils.XMLGenerator(outfile, encoding) handler.startDocument() self.publish(handler) handler.endDocument() - def to_xml(self, encoding = "iso-8859-1"): + def to_xml(self, encoding="iso-8859-1"): try: import io as StringIO except ImportError: @@ -27,7 +29,7 @@ def to_xml(self, encoding = "iso-8859-1"): return f.getvalue() -def _element(handler, name, obj, d = {}): +def _element(handler, name, obj, d={}): if isinstance(obj, str) or obj is None: # special-case handling to make the API easier # to use for the common case. @@ -39,6 +41,7 @@ def _element(handler, name, obj, d = {}): # It better know how to emit the correct XML. obj.publish(handler) + def _opt_element(handler, name, obj): if obj is None: return @@ -58,13 +61,16 @@ def _format_date(dt): # rfc822 and email.Utils modules assume a timestamp. The # following is based on the rfc822 module. return "%s, %02d %s %04d %02d:%02d:%02d GMT" % ( - ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"][dt.weekday()], - dt.day, - ["Jan", "Feb", "Mar", "Apr", "May", "Jun", - "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"][dt.month-1], - dt.year, dt.hour, dt.minute, dt.second) + ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"][dt.weekday()], + dt.day, + ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"][dt.month - 1], + dt.year, + dt.hour, + dt.minute, + dt.second, + ) + - ## # A couple simple wrapper objects for the fields which # take a simple value other than a string. @@ -72,19 +78,23 @@ class IntElement: """implements the 'publish' API for integers Takes the tag name and the integer value to publish. - + (Could be used for anything which uses str() to be published to text for XML.) """ + element_attrs = {} + def __init__(self, name, val): self.name = name self.val = val + def publish(self, handler): handler.startElement(self.name, self.element_attrs) handler.characters(str(self.val)) handler.endElement(self.name) + class DateElement: """implements the 'publish' API for a datetime.datetime @@ -92,53 +102,70 @@ class DateElement: Converts the datetime to RFC 2822 timestamp (4-digit year). 
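    For example, DateElement("pubDate", datetime.datetime(2023, 10, 5))
    publishes <pubDate>Thu, 05 Oct 2023 00:00:00 GMT</pubDate>.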
""" + def __init__(self, name, dt): self.name = name self.dt = dt + def publish(self, handler): _element(handler, self.name, _format_date(self.dt)) + + #### + class Category: """Publish a category element""" - def __init__(self, category, domain = None): + + def __init__(self, category, domain=None): self.category = category self.domain = domain + def publish(self, handler): d = {} if self.domain is not None: d["domain"] = self.domain _element(handler, "category", self.category, d) + class Cloud: """Publish a cloud""" - def __init__(self, domain, port, path, - registerProcedure, protocol): + + def __init__(self, domain, port, path, registerProcedure, protocol): self.domain = domain self.port = port self.path = path self.registerProcedure = registerProcedure self.protocol = protocol + def publish(self, handler): - _element(handler, "cloud", None, { - "domain": self.domain, - "port": str(self.port), - "path": self.path, - "registerProcedure": self.registerProcedure, - "protocol": self.protocol}) + _element( + handler, + "cloud", + None, + { + "domain": self.domain, + "port": str(self.port), + "path": self.path, + "registerProcedure": self.registerProcedure, + "protocol": self.protocol, + }, + ) + class Image: """Publish a channel Image""" + element_attrs = {} - def __init__(self, url, title, link, - width = None, height = None, description = None): + + def __init__(self, url, title, link, width=None, height=None, description=None): self.url = url self.title = title self.link = link self.width = width self.height = height self.description = description - + def publish(self, handler): handler.startElement("image", self.element_attrs) @@ -150,7 +177,7 @@ def publish(self, handler): if isinstance(width, int): width = IntElement("width", width) _opt_element(handler, "width", width) - + height = self.height if isinstance(height, int): height = IntElement("height", height) @@ -160,15 +187,18 @@ def publish(self, handler): handler.endElement("image") + class Guid: """Publish a guid Defaults to being a permalink, which is the assumption if it's omitted. Hence strings are always permalinks. """ - def __init__(self, guid, isPermaLink = 1): + + def __init__(self, guid, isPermaLink=1): self.guid = guid self.isPermaLink = isPermaLink + def publish(self, handler): d = {} if self.isPermaLink: @@ -177,12 +207,15 @@ def publish(self, handler): d["isPermaLink"] = "false" _element(handler, "guid", self.guid, d) + class TextInput: """Publish a textInput Apparently this is rarely used. """ + element_attrs = {} + def __init__(self, title, description, name, link): self.title = title self.description = description @@ -196,37 +229,51 @@ def publish(self, handler): _element(handler, "name", self.name) _element(handler, "link", self.link) handler.endElement("textInput") - + class Enclosure: """Publish an enclosure""" + def __init__(self, url, length, type): self.url = url self.length = length self.type = type + def publish(self, handler): - _element(handler, "enclosure", None, - {"url": self.url, - "length": str(self.length), - "type": self.type, - }) + _element( + handler, + "enclosure", + None, + { + "url": self.url, + "length": str(self.length), + "type": self.type, + }, + ) + class Source: """Publish the item's original source, used by aggregators""" + def __init__(self, name, url): self.name = name self.url = url + def publish(self, handler): _element(handler, "source", self.name, {"url": self.url}) + class SkipHours: """Publish the skipHours This takes a list of hours, as integers. 
""" + element_attrs = {} + def __init__(self, hours): self.hours = hours + def publish(self, handler): if self.hours: handler.startElement("skipHours", self.element_attrs) @@ -234,14 +281,18 @@ def publish(self, handler): _element(handler, "hour", str(hour)) handler.endElement("skipHours") + class SkipDays: """Publish the skipDays This takes a list of days as strings. """ + element_attrs = {} + def __init__(self, days): self.days = days + def publish(self, handler): if self.days: handler.startElement("skipDays", self.element_attrs) @@ -249,41 +300,40 @@ def publish(self, handler): _element(handler, "day", day) handler.endElement("skipDays") + class RSS2(WriteXmlMixin): """The main RSS class. Stores the channel attributes, with the "category" elements under ".categories" and the RSS items under ".items". """ - + rss_attrs = {"version": "2.0"} element_attrs = {} - def __init__(self, - title, - link, - description, - - language = None, - copyright = None, - managingEditor = None, - webMaster = None, - pubDate = None, # a datetime, *in* *GMT* - lastBuildDate = None, # a datetime - - categories = None, # list of strings or Category - generator = _generator_name, - docs = "http://blogs.law.harvard.edu/tech/rss", - cloud = None, # a Cloud - ttl = None, # integer number of minutes - - image = None, # an Image - rating = None, # a string; I don't know how it's used - textInput = None, # a TextInput - skipHours = None, # a SkipHours with a list of integers - skipDays = None, # a SkipDays with a list of strings - - items = None, # list of RSSItems - ): + + def __init__( + self, + title, + link, + description, + language=None, + copyright=None, + managingEditor=None, + webMaster=None, + pubDate=None, # a datetime, *in* *GMT* + lastBuildDate=None, # a datetime + categories=None, # list of strings or Category + generator=_generator_name, + docs="http://blogs.law.harvard.edu/tech/rss", + cloud=None, # a Cloud + ttl=None, # integer number of minutes + image=None, # an Image + rating=None, # a string; I don't know how it's used + textInput=None, # a TextInput + skipHours=None, # a SkipHours with a list of integers + skipDays=None, # a SkipDays with a list of strings + items=None, # list of RSSItems + ): self.title = title self.link = link self.description = description @@ -294,7 +344,7 @@ def __init__(self, self.webMaster = webMaster self.pubDate = pubDate self.lastBuildDate = lastBuildDate - + if categories is None: categories = [] self.categories = categories @@ -320,7 +370,7 @@ def publish(self, handler): _element(handler, "description", self.description) self.publish_extensions(handler) - + _opt_element(handler, "language", self.language) _opt_element(handler, "copyright", self.copyright) _opt_element(handler, "managingEditor", self.managingEditor) @@ -374,27 +424,27 @@ def publish_extensions(self, handler): # output after the three required fields. 
pass - - + class RSSItem(WriteXmlMixin): """Publish an RSS Item""" + element_attrs = {} - def __init__(self, - title = None, # string - link = None, # url as string - description = None, # string - author = None, # email address as string - categories = None, # list of string or Category - comments = None, # url as string - enclosure = None, # an Enclosure - guid = None, # a unique string - pubDate = None, # a datetime - source = None, # a Source - ): - + + def __init__( + self, + title=None, # string + link=None, # url as string + description=None, # string + author=None, # email address as string + categories=None, # list of string or Category + comments=None, # url as string + enclosure=None, # an Enclosure + guid=None, # a unique string + pubDate=None, # a datetime + source=None, # a Source + ): if title is None and description is None: - raise TypeError( - "must define at least one of 'title' or 'description'") + raise TypeError("must define at least one of 'title' or 'description'") self.title = title self.link = link self.description = description @@ -421,7 +471,7 @@ def publish(self, handler): if isinstance(category, str): category = Category(category) category.publish(handler) - + _opt_element(handler, "comments", self.comments) if self.enclosure is not None: self.enclosure.publish(handler) @@ -434,7 +484,7 @@ def publish(self, handler): if self.source is not None: self.source.publish(handler) - + handler.endElement("item") def publish_extensions(self, handler): diff --git a/utils/S3.py b/utils/S3.py index 5e219d06ca..ca76850eb9 100644 --- a/utils/S3.py +++ b/utils/S3.py @@ -20,33 +20,34 @@ import urllib.parse import xml.sax -DEFAULT_HOST = 's3.amazonaws.com' -PORTS_BY_SECURITY = { True: 443, False: 80 } -METADATA_PREFIX = 'x-amz-meta-' -AMAZON_HEADER_PREFIX = 'x-amz-' +DEFAULT_HOST = "s3.amazonaws.com" +PORTS_BY_SECURITY = {True: 443, False: 80} +METADATA_PREFIX = "x-amz-meta-" +AMAZON_HEADER_PREFIX = "x-amz-" + # generates the aws canonical string for the given parameters def canonical_string(method, bucket="", key="", query_args={}, headers={}, expires=None): interesting_headers = {} for header_key in headers: lk = header_key.lower() - if lk in ['content-md5', 'content-type', 'date'] or lk.startswith(AMAZON_HEADER_PREFIX): + if lk in ["content-md5", "content-type", "date"] or lk.startswith(AMAZON_HEADER_PREFIX): interesting_headers[lk] = headers[header_key].strip() # these keys get empty strings if they don't exist - if 'content-type' not in interesting_headers: - interesting_headers['content-type'] = '' - if 'content-md5' not in interesting_headers: - interesting_headers['content-md5'] = '' + if "content-type" not in interesting_headers: + interesting_headers["content-type"] = "" + if "content-md5" not in interesting_headers: + interesting_headers["content-md5"] = "" # just in case someone used this. it's not necessary in this lib. 
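# (When present, x-amz-date is signed along with the other amz- headers below,
# so the Date slot in the canonical string is deliberately left blank.)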
- if 'x-amz-date' in interesting_headers: - interesting_headers['date'] = '' + if "x-amz-date" in interesting_headers: + interesting_headers["date"] = "" # if you're using expires for query string auth, then it trumps date # (and x-amz-date) if expires: - interesting_headers['date'] = str(expires) + interesting_headers["date"] = str(expires) sorted_header_keys = list(interesting_headers.keys()) sorted_header_keys.sort() @@ -78,6 +79,7 @@ def canonical_string(method, bucket="", key="", query_args={}, headers={}, expir return buf + # computes the base64'ed hmac-sha hash of the canonical string and the secret # access key, optionally urlencoding the result def encode(aws_secret_access_key, str, urlencode=False): @@ -87,6 +89,7 @@ def encode(aws_secret_access_key, str, urlencode=False): else: return b64_hmac + def merge_meta(headers, metadata): final_headers = headers.copy() for k in list(metadata.keys()): @@ -94,6 +97,7 @@ def merge_meta(headers, metadata): return final_headers + # builds the query arg string def query_args_hash_to_string(query_args): query_string = "" @@ -104,7 +108,7 @@ def query_args_hash_to_string(query_args): piece += "=%s" % urllib.parse.quote_plus(str(v)) pairs.append(piece) - return '&'.join(pairs) + return "&".join(pairs) class CallingFormat: @@ -113,9 +117,9 @@ class CallingFormat: VANITY = 3 def build_url_base(protocol, server, port, bucket, calling_format): - url_base = '%s://' % protocol + url_base = "%s://" % protocol - if bucket == '': + if bucket == "": url_base += server elif calling_format == CallingFormat.SUBDOMAIN: url_base += "%s.%s" % (bucket, server) @@ -126,7 +130,7 @@ def build_url_base(protocol, server, port, bucket, calling_format): url_base += ":%s" % port - if (bucket != '') and (calling_format == CallingFormat.PATH): + if (bucket != "") and (calling_format == CallingFormat.PATH): url_base += "/%s" % bucket return url_base @@ -134,17 +138,21 @@ def build_url_base(protocol, server, port, bucket, calling_format): build_url_base = staticmethod(build_url_base) - class Location: DEFAULT = None - EU = 'EU' - + EU = "EU" class AWSAuthConnection: - def __init__(self, aws_access_key_id, aws_secret_access_key, is_secure=True, - server=DEFAULT_HOST, port=None, calling_format=CallingFormat.SUBDOMAIN): - + def __init__( + self, + aws_access_key_id, + aws_secret_access_key, + is_secure=True, + server=DEFAULT_HOST, + port=None, + calling_format=CallingFormat.SUBDOMAIN, + ): if not port: port = PORTS_BY_SECURITY[is_secure] @@ -156,86 +164,69 @@ def __init__(self, aws_access_key_id, aws_secret_access_key, is_secure=True, self.calling_format = calling_format def create_bucket(self, bucket, headers={}): - return Response(self._make_request('PUT', bucket, '', {}, headers)) + return Response(self._make_request("PUT", bucket, "", {}, headers)) def create_located_bucket(self, bucket, location=Location.DEFAULT, headers={}): if location == Location.DEFAULT: body = "" else: - body = "" + \ - location + \ - "" - return Response(self._make_request('PUT', bucket, '', {}, headers, body)) + body = ( + "" + + location + + "" + ) + return Response(self._make_request("PUT", bucket, "", {}, headers, body)) def check_bucket_exists(self, bucket): - return self._make_request('HEAD', bucket, '', {}, {}) + return self._make_request("HEAD", bucket, "", {}, {}) def list_bucket(self, bucket, options={}, headers={}): - return ListBucketResponse(self._make_request('GET', bucket, '', options, headers)) + return ListBucketResponse(self._make_request("GET", bucket, "", options, headers)) def 
delete_bucket(self, bucket, headers={}): - return Response(self._make_request('DELETE', bucket, '', {}, headers)) + return Response(self._make_request("DELETE", bucket, "", {}, headers)) def put(self, bucket, key, object, headers={}): if not isinstance(object, S3Object): object = S3Object(object) - return Response( - self._make_request( - 'PUT', - bucket, - key, - {}, - headers, - object.data, - object.metadata)) + return Response(self._make_request("PUT", bucket, key, {}, headers, object.data, object.metadata)) def get(self, bucket, key, headers={}): - return GetResponse( - self._make_request('GET', bucket, key, {}, headers)) + return GetResponse(self._make_request("GET", bucket, key, {}, headers)) def delete(self, bucket, key, headers={}): - return Response( - self._make_request('DELETE', bucket, key, {}, headers)) + return Response(self._make_request("DELETE", bucket, key, {}, headers)) def get_bucket_logging(self, bucket, headers={}): - return GetResponse(self._make_request('GET', bucket, '', { 'logging': None }, headers)) + return GetResponse(self._make_request("GET", bucket, "", {"logging": None}, headers)) def put_bucket_logging(self, bucket, logging_xml_doc, headers={}): - return Response(self._make_request('PUT', bucket, '', { 'logging': None }, headers, logging_xml_doc)) + return Response(self._make_request("PUT", bucket, "", {"logging": None}, headers, logging_xml_doc)) def get_bucket_acl(self, bucket, headers={}): - return self.get_acl(bucket, '', headers) + return self.get_acl(bucket, "", headers) def get_acl(self, bucket, key, headers={}): - return GetResponse( - self._make_request('GET', bucket, key, { 'acl': None }, headers)) + return GetResponse(self._make_request("GET", bucket, key, {"acl": None}, headers)) def put_bucket_acl(self, bucket, acl_xml_document, headers={}): - return self.put_acl(bucket, '', acl_xml_document, headers) + return self.put_acl(bucket, "", acl_xml_document, headers) def put_acl(self, bucket, key, acl_xml_document, headers={}): - return Response( - self._make_request( - 'PUT', - bucket, - key, - { 'acl': None }, - headers, - acl_xml_document)) + return Response(self._make_request("PUT", bucket, key, {"acl": None}, headers, acl_xml_document)) def list_all_my_buckets(self, headers={}): - return ListAllMyBucketsResponse(self._make_request('GET', '', '', {}, headers)) + return ListAllMyBucketsResponse(self._make_request("GET", "", "", {}, headers)) def get_bucket_location(self, bucket): - return LocationResponse(self._make_request('GET', bucket, '', {'location' : None})) + return LocationResponse(self._make_request("GET", bucket, "", {"location": None})) # end public methods - def _make_request(self, method, bucket='', key='', query_args={}, headers={}, data='', metadata={}): - - server = '' - if bucket == '': + def _make_request(self, method, bucket="", key="", query_args={}, headers={}, data="", metadata={}): + server = "" + if bucket == "": server = self.server elif self.calling_format == CallingFormat.SUBDOMAIN: server = "%s.%s" % (bucket, self.server) @@ -244,18 +235,17 @@ def _make_request(self, method, bucket='', key='', query_args={}, headers={}, da else: server = self.server - path = '' + path = "" - if (bucket != '') and (self.calling_format == CallingFormat.PATH): + if (bucket != "") and (self.calling_format == CallingFormat.PATH): path += "/%s" % bucket # add the slash after the bucket regardless # the key will be appended if it is non-empty path += "/%s" % urllib.parse.quote_plus(key) - # build the path_argument string - # add the ? 
in all cases since
+        # add the ? in all cases since
         # signature and credentials follow path args
         if len(query_args):
             path += "?" + query_args_hash_to_string(query_args)
@@ -263,12 +253,12 @@ def _make_request(self, method, bucket='', key='', query_args={}, headers={}, da
         is_secure = self.is_secure
         host = "%s:%d" % (server, self.port)
         while True:
-            if (is_secure):
+            if is_secure:
                 connection = http.client.HTTPSConnection(host)
             else:
                 connection = http.client.HTTPConnection(host)
 
-            final_headers = merge_meta(headers, metadata);
+            final_headers = merge_meta(headers, metadata)
             # add auth header
             self._add_aws_auth_header(final_headers, method, bucket, key, query_args)
 
@@ -277,44 +267,55 @@ def _make_request(self, method, bucket='', key='', query_args={}, headers={}, da
             if resp.status < 300 or resp.status >= 400:
                 return resp
             # handle redirect
-            location = resp.getheader('location')
+            location = resp.getheader("location")
             if not location:
                 return resp
             # (close connection)
             resp.read()
-            scheme, host, path, params, query, fragment \
-                = urllib.parse.urlparse(location)
-            if scheme == "http": is_secure = True
-            elif scheme == "https": is_secure = False
-            else: raise invalidURL("Not http/https: " + location)
-            if query: path += "?" + query
+            scheme, host, path, params, query, fragment = urllib.parse.urlparse(location)
+            if scheme == "https":
+                is_secure = True
+            elif scheme == "http":
+                is_secure = False
+            else:
+                raise ValueError("Not http/https: " + location)
+            if query:
+                path += "?" + query
             # retry with redirect
 
     def _add_aws_auth_header(self, headers, method, bucket, key, query_args):
-        if 'Date' not in headers:
-            headers['Date'] = time.strftime("%a, %d %b %Y %X GMT", time.gmtime())
+        if "Date" not in headers:
+            headers["Date"] = time.strftime("%a, %d %b %Y %X GMT", time.gmtime())
 
         c_string = canonical_string(method, bucket, key, query_args, headers)
-        headers['Authorization'] = \
-            "AWS %s:%s" % (self.aws_access_key_id, encode(self.aws_secret_access_key, c_string))
+        headers["Authorization"] = "AWS %s:%s" % (
+            self.aws_access_key_id,
+            encode(self.aws_secret_access_key, c_string),
+        )
 
 
 class QueryStringAuthGenerator:
     # by default, expire in 1 minute
     DEFAULT_EXPIRES_IN = 60
 
-    def __init__(self, aws_access_key_id, aws_secret_access_key, is_secure=True,
-                 server=DEFAULT_HOST, port=None, calling_format=CallingFormat.SUBDOMAIN):
-
+    def __init__(
+        self,
+        aws_access_key_id,
+        aws_secret_access_key,
+        is_secure=True,
+        server=DEFAULT_HOST,
+        port=None,
+        calling_format=CallingFormat.SUBDOMAIN,
+    ):
         if not port:
             port = PORTS_BY_SECURITY[is_secure]
         self.aws_access_key_id = aws_access_key_id
         self.aws_secret_access_key = aws_secret_access_key
-        if (is_secure):
-            self.protocol = 'https'
+        if is_secure:
+            self.protocol = "https"
         else:
-            self.protocol = 'http'
+            self.protocol = "http"
 
         self.is_secure = is_secure
         self.server = server
@@ -335,58 +336,53 @@ def set_expires(self, expires):
         self.__expires_in = None
 
     def create_bucket(self, bucket, headers={}):
-        return self.generate_url('PUT', bucket, '', {}, headers)
+        return self.generate_url("PUT", bucket, "", {}, headers)
 
     def list_bucket(self, bucket, options={}, headers={}):
-        return self.generate_url('GET', bucket, '', options, headers)
+        return self.generate_url("GET", bucket, "", options, headers)
 
     def delete_bucket(self, bucket, headers={}):
-        return self.generate_url('DELETE', bucket, '', {}, headers)
+        return self.generate_url("DELETE", bucket, "", {}, headers)
 
     def put(self, bucket, key, object, headers={}):
         if not isinstance(object, S3Object):
            object = 
S3Object(object) - return self.generate_url( - 'PUT', - bucket, - key, - {}, - merge_meta(headers, object.metadata)) + return self.generate_url("PUT", bucket, key, {}, merge_meta(headers, object.metadata)) def get(self, bucket, key, headers={}): - return self.generate_url('GET', bucket, key, {}, headers) + return self.generate_url("GET", bucket, key, {}, headers) def delete(self, bucket, key, headers={}): - return self.generate_url('DELETE', bucket, key, {}, headers) + return self.generate_url("DELETE", bucket, key, {}, headers) def get_bucket_logging(self, bucket, headers={}): - return self.generate_url('GET', bucket, '', { 'logging': None }, headers) + return self.generate_url("GET", bucket, "", {"logging": None}, headers) def put_bucket_logging(self, bucket, logging_xml_doc, headers={}): - return self.generate_url('PUT', bucket, '', { 'logging': None }, headers) + return self.generate_url("PUT", bucket, "", {"logging": None}, headers) def get_bucket_acl(self, bucket, headers={}): - return self.get_acl(bucket, '', headers) + return self.get_acl(bucket, "", headers) - def get_acl(self, bucket, key='', headers={}): - return self.generate_url('GET', bucket, key, { 'acl': None }, headers) + def get_acl(self, bucket, key="", headers={}): + return self.generate_url("GET", bucket, key, {"acl": None}, headers) def put_bucket_acl(self, bucket, acl_xml_document, headers={}): - return self.put_acl(bucket, '', acl_xml_document, headers) + return self.put_acl(bucket, "", acl_xml_document, headers) # don't really care what the doc is here. def put_acl(self, bucket, key, acl_xml_document, headers={}): - return self.generate_url('PUT', bucket, key, { 'acl': None }, headers) + return self.generate_url("PUT", bucket, key, {"acl": None}, headers) def list_all_my_buckets(self, headers={}): - return self.generate_url('GET', '', '', {}, headers) + return self.generate_url("GET", "", "", {}, headers) - def make_bare_url(self, bucket, key=''): + def make_bare_url(self, bucket, key=""): full_url = self.generate_url(self, bucket, key) - return full_url[:full_url.index('?')] + return full_url[: full_url.index("?")] - def generate_url(self, method, bucket='', key='', query_args={}, headers={}): + def generate_url(self, method, bucket="", key="", query_args={}, headers={}): expires = 0 if self.__expires_in != None: expires = int(time.time() + self.__expires_in) @@ -402,9 +398,9 @@ def generate_url(self, method, bucket='', key='', query_args={}, headers={}): url += "/%s" % urllib.parse.quote_plus(key) - query_args['Signature'] = encoded_canonical - query_args['Expires'] = expires - query_args['AWSAccessKeyId'] = self.aws_access_key_id + query_args["Signature"] = encoded_canonical + query_args["Expires"] = expires + query_args["AWSAccessKeyId"] = self.aws_access_key_id url += "?%s" % query_args_hash_to_string(query_args) @@ -416,13 +412,15 @@ def __init__(self, data, metadata={}): self.data = data self.metadata = metadata + class Owner: - def __init__(self, id='', display_name=''): + def __init__(self, id="", display_name=""): self.id = id self.display_name = display_name + class ListEntry: - def __init__(self, key='', last_modified=None, etag='', size=0, storage_class='', owner=None): + def __init__(self, key="", last_modified=None, etag="", size=0, storage_class="", owner=None): self.key = key self.last_modified = last_modified self.etag = etag @@ -430,15 +428,18 @@ def __init__(self, key='', last_modified=None, etag='', size=0, storage_class='' self.storage_class = storage_class self.owner = owner + class 
CommonPrefixEntry:
-    def __init(self, prefix=''):
+    def __init__(self, prefix=""):
         self.prefix = prefix
 
+
 class Bucket:
-    def __init__(self, name='', creation_date=''):
+    def __init__(self, name="", creation_date=""):
         self.name = name
         self.creation_date = creation_date
 
+
 class Response:
     def __init__(self, http_response):
         self.http_response = http_response
@@ -451,7 +452,6 @@ def __init__(self, http_response):
             self.message = "%03d %s" % (http_response.status, http_response.reason)
 
 
-
 class ListBucketResponse(Response):
     def __init__(self, http_response):
         Response.__init__(self, http_response)
@@ -470,20 +470,22 @@ def __init__(self, http_response):
         else:
             self.entries = []
 
+
 class ListAllMyBucketsResponse(Response):
     def __init__(self, http_response):
         Response.__init__(self, http_response)
-        if http_response.status < 300: 
+        if http_response.status < 300:
             handler = ListAllMyBucketsHandler()
             xml.sax.parseString(self.body, handler)
             self.entries = handler.entries
         else:
             self.entries = []
 
+
 class GetResponse(Response):
     def __init__(self, http_response):
         Response.__init__(self, http_response)
-        response_headers = http_response.msg # older pythons don't have getheaders
+        response_headers = http_response.msg  # older pythons don't have getheaders
         metadata = self.get_aws_metadata(response_headers)
         self.object = S3Object(self.body, metadata)
 
@@ -491,82 +493,83 @@ def get_aws_metadata(self, headers):
         metadata = {}
         for hkey in list(headers.keys()):
             if hkey.lower().startswith(METADATA_PREFIX):
-                metadata[hkey[len(METADATA_PREFIX):]] = headers[hkey]
+                metadata[hkey[len(METADATA_PREFIX) :]] = headers[hkey]
                 del headers[hkey]
 
         return metadata
 
+
 class LocationResponse(Response):
     def __init__(self, http_response):
         Response.__init__(self, http_response)
-        if http_response.status < 300: 
+        if http_response.status < 300:
             handler = LocationHandler()
             xml.sax.parseString(self.body, handler)
             self.location = handler.location
 
+
 class ListBucketHandler(xml.sax.ContentHandler):
     def __init__(self):
         self.entries = []
         self.curr_entry = None
-        self.curr_text = ''
+        self.curr_text = ""
         self.common_prefixes = []
         self.curr_common_prefix = None
-        self.name = ''
-        self.marker = ''
-        self.prefix = ''
+        self.name = ""
+        self.marker = ""
+        self.prefix = ""
         self.is_truncated = False
-        self.delimiter = ''
+        self.delimiter = ""
         self.max_keys = 0
-        self.next_marker = ''
+        self.next_marker = ""
         self.is_echoed_prefix_set = False
 
     def startElement(self, name, attrs):
-        if name == 'Contents':
+        if name == "Contents":
            self.curr_entry = ListEntry()
-        elif name == 'Owner':
+        elif name == "Owner":
            self.curr_entry.owner = Owner()
-        elif name == 'CommonPrefixes':
+        elif name == "CommonPrefixes":
            self.curr_common_prefix = CommonPrefixEntry()
-
     def endElement(self, name):
-        if name == 'Contents':
+        if name == "Contents":
            self.entries.append(self.curr_entry)
-        elif name == 'CommonPrefixes':
+        elif name == "CommonPrefixes":
            self.common_prefixes.append(self.curr_common_prefix)
-        elif name == 'Key':
+        elif name == "Key":
            self.curr_entry.key = self.curr_text
-        elif name == 'LastModified':
+        elif name == "LastModified":
            self.curr_entry.last_modified = self.curr_text
-        elif name == 'ETag':
+        elif name == "ETag":
            self.curr_entry.etag = self.curr_text
-        elif name == 'Size':
+        elif name == "Size":
            self.curr_entry.size = int(self.curr_text)
-        elif name == 'ID':
+        elif name == "ID":
            self.curr_entry.owner.id = self.curr_text
-        elif name == 'DisplayName':
+        elif name == "DisplayName":
            self.curr_entry.owner.display_name = self.curr_text
-        elif name == 'StorageClass':
+ 
elif name == "StorageClass": self.curr_entry.storage_class = self.curr_text - elif name == 'Name': + elif name == "Name": self.name = self.curr_text - elif name == 'Prefix' and self.is_echoed_prefix_set: + elif name == "Prefix" and self.is_echoed_prefix_set: self.curr_common_prefix.prefix = self.curr_text - elif name == 'Prefix': + elif name == "Prefix": self.prefix = self.curr_text self.is_echoed_prefix_set = True - elif name == 'Marker': + elif name == "Marker": self.marker = self.curr_text - elif name == 'IsTruncated': - self.is_truncated = self.curr_text == 'true' - elif name == 'Delimiter': + elif name == "IsTruncated": + self.is_truncated = self.curr_text == "true" + elif name == "Delimiter": self.delimiter = self.curr_text - elif name == 'MaxKeys': + elif name == "MaxKeys": self.max_keys = int(self.curr_text) - elif name == 'NextMarker': + elif name == "NextMarker": self.next_marker = self.curr_text - self.curr_text = '' + self.curr_text = "" def characters(self, content): self.curr_text += content @@ -576,18 +579,18 @@ class ListAllMyBucketsHandler(xml.sax.ContentHandler): def __init__(self): self.entries = [] self.curr_entry = None - self.curr_text = '' + self.curr_text = "" def startElement(self, name, attrs): - if name == 'Bucket': + if name == "Bucket": self.curr_entry = Bucket() def endElement(self, name): - if name == 'Name': + if name == "Name": self.curr_entry.name = self.curr_text - elif name == 'CreationDate': + elif name == "CreationDate": self.curr_entry.creation_date = self.curr_text - elif name == 'Bucket': + elif name == "Bucket": self.entries.append(self.curr_entry) def characters(self, content): @@ -597,21 +600,24 @@ def characters(self, content): class LocationHandler(xml.sax.ContentHandler): def __init__(self): self.location = None - self.state = 'init' + self.state = "init" def startElement(self, name, attrs): - if self.state == 'init': - if name == 'LocationConstraint': - self.state = 'tag_location' - self.location = '' - else: self.state = 'bad' - else: self.state = 'bad' + if self.state == "init": + if name == "LocationConstraint": + self.state = "tag_location" + self.location = "" + else: + self.state = "bad" + else: + self.state = "bad" def endElement(self, name): - if self.state == 'tag_location' and name == 'LocationConstraint': - self.state = 'done' - else: self.state = 'bad' + if self.state == "tag_location" and name == "LocationConstraint": + self.state = "done" + else: + self.state = "bad" def characters(self, content): - if self.state == 'tag_location': + if self.state == "tag_location": self.location += content diff --git a/utils/archive/Image Color Algorithm.py b/utils/archive/Image Color Algorithm.py index f02c6fdbd7..d6fc230f4c 100644 --- a/utils/archive/Image Color Algorithm.py +++ b/utils/archive/Image Color Algorithm.py @@ -3,7 +3,7 @@ import scipy.cluster from pprint import pprint -image = Image.open('logo.png') +image = Image.open("logo.png") NUM_CLUSTERS = 5 # Convert image into array of values for each point. @@ -20,11 +20,20 @@ # Pare centroids, removing blacks and whites and shades of really dark and really light. 
original_codes = codes for low, hi in [(60, 200), (35, 230), (10, 250)]: - codes = scipy.array([code for code in codes - if not ((code[0] < low and code[1] < low and code[2] < low) or - (code[0] > hi and code[1] > hi and code[2] > hi))]) - if not len(codes): codes = original_codes - else: break + codes = scipy.array( + [ + code + for code in codes + if not ( + (code[0] < low and code[1] < low and code[2] < low) + or (code[0] > hi and code[1] > hi and code[2] > hi) + ) + ] + ) + if not len(codes): + codes = original_codes + else: + break # Assign codes (vector quantization). Each vector is compared to the centroids # and assigned the nearest one. @@ -34,12 +43,12 @@ counts, bins = scipy.histogram(vecs, len(codes)) # Show colors for each code in its hex value. -colors = [''.join(chr(c) for c in code).encode('hex') for code in codes] +colors = ["".join(chr(c) for c in code).encode("hex") for code in codes] total = scipy.sum(counts) -color_dist = dict(list(zip(colors, [count/float(total) for count in counts]))) +color_dist = dict(list(zip(colors, [count / float(total) for count in counts]))) pprint(color_dist) # Find the most frequent color, based on the counts. index_max = scipy.argmax(counts) peak = codes[index_max] -color = ''.join(chr(c) for c in peak).encode('hex') +color = "".join(chr(c) for c in peak).encode("hex") diff --git a/utils/archive/bootstrap_intel.py b/utils/archive/bootstrap_intel.py index fd2d7fe3f5..357fe5401c 100644 --- a/utils/archive/bootstrap_intel.py +++ b/utils/archive/bootstrap_intel.py @@ -6,8 +6,7 @@ from apps.analyzer.models import MClassifierTag from apps.analyzer.models import MClassifierTitle -for classifier_cls in [MClassifierFeed, MClassifierAuthor, - MClassifierTag, MClassifierTitle]: +for classifier_cls in [MClassifierFeed, MClassifierAuthor, MClassifierTag, MClassifierTitle]: print(" ================================================================= ") print((" Now on %s " % classifier_cls.__name__)) print(" ================================================================= ") @@ -28,4 +27,3 @@ except ValidationError as e: print((" ***> ValidationError error on: %s" % e)) print((" ***> Original classifier: %s" % classifier.__dict__)) - diff --git a/utils/archive/bootstrap_mongo.py b/utils/archive/bootstrap_mongo.py index d7fd747aa2..5df95f55db 100644 --- a/utils/archive/bootstrap_mongo.py +++ b/utils/archive/bootstrap_mongo.py @@ -10,7 +10,8 @@ from utils import json_functions as json MONGO_DB = settings.MONGO_DB -db = mongoengine.connect(MONGO_DB['NAME'], host=MONGO_DB['HOST'], port=MONGO_DB['PORT']) +db = mongoengine.connect(MONGO_DB["NAME"], host=MONGO_DB["HOST"], port=MONGO_DB["PORT"]) + def bootstrap_stories(): print("Mongo DB stories: %s" % MStory.objects().count()) @@ -20,24 +21,23 @@ def bootstrap_stories(): print("Stories: %s" % Story.objects.all().count()) pprint(db.stories.index_information()) - feeds = Feed.objects.all().order_by('-average_stories_per_month') + feeds = Feed.objects.all().order_by("-average_stories_per_month") feed_count = feeds.count() i = 0 for feed in feeds: i += 1 - print("%s/%s: %s (%s stories)" % (i, feed_count, - feed, Story.objects.filter(story_feed=feed).count())) + print("%s/%s: %s (%s stories)" % (i, feed_count, feed, Story.objects.filter(story_feed=feed).count())) sys.stdout.flush() - + stories = list(Story.objects.filter(story_feed=feed).values()) for story in stories: # story['story_tags'] = [tag.name for tag in Tag.objects.filter(story=story['id'])] try: - story['story_tags'] = json.decode(story['story_tags']) + 
story["story_tags"] = json.decode(story["story_tags"]) except: continue - del story['id'] - del story['story_author_id'] + del story["id"] + del story["story_author_id"] try: MStory(**story).save() except: @@ -45,6 +45,7 @@ def bootstrap_stories(): print("\nMongo DB stories: %s" % MStory.objects().count()) + def bootstrap_userstories(): print("Mongo DB userstories: %s" % MUserStory.objects().count()) # db.userstories.drop() @@ -56,58 +57,64 @@ def bootstrap_userstories(): userstories = list(UserStory.objects.all().values()) for userstory in userstories: try: - story = Story.objects.get(pk=userstory['story_id']) + story = Story.objects.get(pk=userstory["story_id"]) except Story.DoesNotExist: continue try: - userstory['story'] = MStory.objects(story_feed_id=story.story_feed.pk, story_guid=story.story_guid)[0] + userstory["story"] = MStory.objects( + story_feed_id=story.story_feed.pk, story_guid=story.story_guid + )[0] except: - print('!') + print("!") continue - print('.') - del userstory['id'] - del userstory['opinion'] - del userstory['story_id'] + print(".") + del userstory["id"] + del userstory["opinion"] + del userstory["story_id"] try: MUserStory(**userstory).save() except: - print('\n\n!\n\n') + print("\n\n!\n\n") continue print("\nMongo DB userstories: %s" % MUserStory.objects().count()) + def bootstrap_classifiers(): - for sql_classifier, mongo_classifier in ((ClassifierTitle, MClassifierTitle), - (ClassifierAuthor, MClassifierAuthor), - (ClassifierFeed, MClassifierFeed), - (ClassifierTag, MClassifierTag)): - collection = mongo_classifier.meta['collection'] + for sql_classifier, mongo_classifier in ( + (ClassifierTitle, MClassifierTitle), + (ClassifierAuthor, MClassifierAuthor), + (ClassifierFeed, MClassifierFeed), + (ClassifierTag, MClassifierTag), + ): + collection = mongo_classifier.meta["collection"] print("Mongo DB classifiers: %s - %s" % (collection, mongo_classifier.objects().count())) # db[collection].drop() print("Dropped! 
Mongo DB classifiers: %s - %s" % (collection, mongo_classifier.objects().count())) print("%s: %s" % (sql_classifier._meta.object_name, sql_classifier.objects.all().count())) pprint(db[collection].index_information()) - + for userclassifier in list(sql_classifier.objects.all().values()): - del userclassifier['id'] - if sql_classifier._meta.object_name == 'ClassifierAuthor': - author = StoryAuthor.objects.get(pk=userclassifier['author_id']) - userclassifier['author'] = author.author_name - del userclassifier['author_id'] - if sql_classifier._meta.object_name == 'ClassifierTag': - tag = Tag.objects.get(pk=userclassifier['tag_id']) - userclassifier['tag'] = tag.name - del userclassifier['tag_id'] - print('.') + del userclassifier["id"] + if sql_classifier._meta.object_name == "ClassifierAuthor": + author = StoryAuthor.objects.get(pk=userclassifier["author_id"]) + userclassifier["author"] = author.author_name + del userclassifier["author_id"] + if sql_classifier._meta.object_name == "ClassifierTag": + tag = Tag.objects.get(pk=userclassifier["tag_id"]) + userclassifier["tag"] = tag.name + del userclassifier["tag_id"] + print(".") try: mongo_classifier(**userclassifier).save() except: - print('\n\n!\n\n') + print("\n\n!\n\n") continue - + print("\nMongo DB classifiers: %s - %s" % (collection, mongo_classifier.objects().count())) - + + def bootstrap_feedpages(): print("Mongo DB feed_pages: %s" % MFeedPage.objects().count()) # db.feed_pages.drop() @@ -116,28 +123,35 @@ def bootstrap_feedpages(): print("FeedPages: %s" % FeedPage.objects.count()) pprint(db.feed_pages.index_information()) - feeds = Feed.objects.all().order_by('-average_stories_per_month') + feeds = Feed.objects.all().order_by("-average_stories_per_month") feed_count = feeds.count() i = 0 for feed in feeds: i += 1 - print("%s/%s: %s" % (i, feed_count, feed,)) + print( + "%s/%s: %s" + % ( + i, + feed_count, + feed, + ) + ) sys.stdout.flush() - + if not MFeedPage.objects(feed_id=feed.pk): feed_page = list(FeedPage.objects.filter(feed=feed).values()) if feed_page: - del feed_page[0]['id'] - feed_page[0]['feed_id'] = feed.pk + del feed_page[0]["id"] + feed_page[0]["feed_id"] = feed.pk try: MFeedPage(**feed_page[0]).save() except: - print('\n\n!\n\n') + print("\n\n!\n\n") continue - print("\nMongo DB feed_pages: %s" % MFeedPage.objects().count()) + def bootstrap_feedicons(): print("Mongo DB feed_icons: %s" % MFeedIcon.objects().count()) db.feed_icons.drop() @@ -146,47 +160,62 @@ def bootstrap_feedicons(): print("FeedIcons: %s" % FeedIcon.objects.count()) pprint(db.feed_icons.index_information()) - feeds = Feed.objects.all().order_by('-average_stories_per_month') + feeds = Feed.objects.all().order_by("-average_stories_per_month") feed_count = feeds.count() i = 0 for feed in feeds: i += 1 - print("%s/%s: %s" % (i, feed_count, feed,)) + print( + "%s/%s: %s" + % ( + i, + feed_count, + feed, + ) + ) sys.stdout.flush() - + if not MFeedIcon.objects(feed_id=feed.pk): feed_icon = list(FeedIcon.objects.filter(feed=feed).values()) if feed_icon: try: MFeedIcon(**feed_icon[0]).save() except: - print('\n\n!\n\n') + print("\n\n!\n\n") continue - print("\nMongo DB feed_icons: %s" % MFeedIcon.objects().count()) + def compress_stories(): count = MStory.objects().count() print("Mongo DB stories: %s" % count) p = 0.0 i = 0 - feeds = Feed.objects.all().order_by('-average_stories_per_month') + feeds = Feed.objects.all().order_by("-average_stories_per_month") feed_count = feeds.count() f = 0 for feed in feeds: f += 1 - print("%s/%s: %s" % (f, feed_count, feed,)) 
+ print( + "%s/%s: %s" + % ( + f, + feed_count, + feed, + ) + ) sys.stdout.flush() - + for story in MStory.objects(story_feed_id=feed.pk): i += 1.0 if round(i / count * 100) != p: p = round(i / count * 100) - print('%s%%' % p) + print("%s%%" % p) story.save() - + + def reindex_stories(): db = pymongo.Connection().newsblur count = MStory.objects().count() @@ -194,18 +223,25 @@ def reindex_stories(): p = 0.0 i = 0 - feeds = Feed.objects.all().order_by('-average_stories_per_month') + feeds = Feed.objects.all().order_by("-average_stories_per_month") feed_count = feeds.count() f = 0 for feed in feeds: f += 1 - print("%s/%s: %s" % (f, feed_count, feed,)) + print( + "%s/%s: %s" + % ( + f, + feed_count, + feed, + ) + ) sys.stdout.flush() for story in MStory.objects(story_feed_id=feed.pk): i += 1.0 if round(i / count * 100) != p: p = round(i / count * 100) - print('%s%%' % p) + print("%s%%" % p) if isinstance(story.id, str): story.story_guid = story.id story.id = pymongo.objectid.ObjectId() @@ -214,14 +250,15 @@ def reindex_stories(): except OperationError as e: print(" ***> OperationError: %s" % e) except e: - print(' ***> Unknown Error: %s' % e) + print(" ***> Unknown Error: %s" % e) db.stories.remove({"_id": story.story_guid}) - -if __name__ == '__main__': + + +if __name__ == "__main__": # bootstrap_stories() # bootstrap_userstories() # bootstrap_classifiers() # bootstrap_feedpages() # compress_stories() # reindex_stories() - bootstrap_feedicons() \ No newline at end of file + bootstrap_feedicons() diff --git a/utils/archive/bootstrap_redis_sessions.py b/utils/archive/bootstrap_redis_sessions.py index fc13bb5753..1359415a65 100644 --- a/utils/archive/bootstrap_redis_sessions.py +++ b/utils/archive/bootstrap_redis_sessions.py @@ -8,7 +8,7 @@ batch_size = 1000 r = redis.Redis(connection_pool=settings.REDIS_SESSION_POOL) -for batch in range(int(math.ceil(sessions_count / batch_size))+1): +for batch in range(int(math.ceil(sessions_count / batch_size)) + 1): start = batch * batch_size end = (batch + 1) * batch_size print((" ---> Loading sessions #%s - #%s" % (start, end))) @@ -16,4 +16,4 @@ for session in Session.objects.all()[start:end]: _ = pipe.set(session.session_key, session.session_data) _ = pipe.expireat(session.session_key, session.expire_date.strftime("%s")) - _ = pipe.execute() \ No newline at end of file + _ = pipe.execute() diff --git a/utils/archive/bootstrap_story_hash.py b/utils/archive/bootstrap_story_hash.py index efcb31e5fb..7d856670d9 100644 --- a/utils/archive/bootstrap_story_hash.py +++ b/utils/archive/bootstrap_story_hash.py @@ -6,24 +6,24 @@ db = settings.MONGODB batch = 0 start = 0 -for f in range(start, Feed.objects.latest('pk').pk): - if f < batch*100000: continue +for f in range(start, Feed.objects.latest("pk").pk): + if f < batch * 100000: + continue start = time.time() try: cp1 = time.time() - start # if feed.active_premium_subscribers < 1: continue - stories = MStory.objects.filter(story_feed_id=f, story_hash__exists=False)\ - .only('id', 'story_feed_id', 'story_guid')\ - .read_preference(pymongo.ReadPreference.SECONDARY) + stories = ( + MStory.objects.filter(story_feed_id=f, story_hash__exists=False) + .only("id", "story_feed_id", "story_guid") + .read_preference(pymongo.ReadPreference.SECONDARY) + ) cp2 = time.time() - start count = 0 for story in stories: count += 1 - db.newsblur.stories.update({"_id": story.id}, {"$set": { - "story_hash": story.feed_guid_hash - }}) + db.newsblur.stories.update({"_id": story.id}, {"$set": {"story_hash": story.feed_guid_hash}}) cp3 = 
time.time() - start print(("%s: %3s stories (%s/%s/%s)" % (f, count, round(cp1, 2), round(cp2, 2), round(cp3, 2)))) except Exception as e: print((" ***> (%s) %s" % (f, e))) - diff --git a/utils/archive/check_status.py b/utils/archive/check_status.py index cbad9f317e..b5e20d707f 100644 --- a/utils/archive/check_status.py +++ b/utils/archive/check_status.py @@ -1,5 +1,6 @@ import time import requests + url = "http://www.newsblur.com" @@ -8,6 +9,10 @@ req = requests.get(url) content = req.content end = time.time() - print((" ---> [%s] Retrieved %s bytes - %s %s" % (str(end - start)[:4], len(content), req.status_code, req.reason))) + print( + ( + " ---> [%s] Retrieved %s bytes - %s %s" + % (str(end - start)[:4], len(content), req.status_code, req.reason) + ) + ) time.sleep(5) - diff --git a/utils/archive/green.py b/utils/archive/green.py index 46e09359dc..7f746cd85d 100644 --- a/utils/archive/green.py +++ b/utils/archive/green.py @@ -1,4 +1,5 @@ from gevent import monkey + monkey.patch_socket() from newsblur.utils import feedparser @@ -6,13 +7,15 @@ from gevent import queue import urllib.request, urllib.error, urllib.parse + def fetch_title(url): print(("Running %s" % url)) data = urllib.request.urlopen(url).read() print(("Parsing %s" % url)) d = feedparser.parse(data) - print(("Parsed %s" % d.feed.get('title', ''))) - return d.feed.get('title', '') + print(("Parsed %s" % d.feed.get("title", ""))) + return d.feed.get("title", "") + def worker(): while True: @@ -22,15 +25,18 @@ def worker(): finally: q.task_done() -if __name__ == '__main__': + +if __name__ == "__main__": q = queue.JoinableQueue() for i in range(5): - gevent.spawn(worker) + gevent.spawn(worker) - for url in "http://www.43folders.com/rss.xml/nhttp://feeds.feedburner.com/43folders/nhttp://www.43folders.com/rss.xml/nhttp://feeds.feedburner.com/43folders/nhttp://feeds.feedburner.com/AMinuteWithBrendan/nhttp://feeds.feedburner.com/AMinuteWithBrendan/nhttp://www.asianart.org/feeds/Lectures,Classes,Symposia.xml/nhttp://www.asianart.org/feeds/Performances.xml/nhttp://feeds.feedburner.com/ajaxian/nhttp://ajaxian.com/index.xml/nhttp://al3x.net/atom.xml/nhttp://feeds.feedburner.com/AmericanDrink/nhttp://feeds.feedburner.com/eod_full/nhttp://feeds.feedburner.com/typepad/notes/nhttp://feeds.dashes.com/AnilDash/nhttp://rss.sciam.com/assignment-impossible/feed/nhttp://blogs.scientificamerican.com/assignment-impossible//nhttp://feeds.feedburner.com/Beautiful-Pixels/nhttp://feeds.feedburner.com/Beautiful-Pixels/nhttp://www.betabeat.com/feed/".split('/n'): - print(("Spawning: %s" % url)) - q.put(url) + for ( + url + ) in "http://www.43folders.com/rss.xml/nhttp://feeds.feedburner.com/43folders/nhttp://www.43folders.com/rss.xml/nhttp://feeds.feedburner.com/43folders/nhttp://feeds.feedburner.com/AMinuteWithBrendan/nhttp://feeds.feedburner.com/AMinuteWithBrendan/nhttp://www.asianart.org/feeds/Lectures,Classes,Symposia.xml/nhttp://www.asianart.org/feeds/Performances.xml/nhttp://feeds.feedburner.com/ajaxian/nhttp://ajaxian.com/index.xml/nhttp://al3x.net/atom.xml/nhttp://feeds.feedburner.com/AmericanDrink/nhttp://feeds.feedburner.com/eod_full/nhttp://feeds.feedburner.com/typepad/notes/nhttp://feeds.dashes.com/AnilDash/nhttp://rss.sciam.com/assignment-impossible/feed/nhttp://blogs.scientificamerican.com/assignment-impossible//nhttp://feeds.feedburner.com/Beautiful-Pixels/nhttp://feeds.feedburner.com/Beautiful-Pixels/nhttp://www.betabeat.com/feed/".split( + "/n" + ): + print(("Spawning: %s" % url)) + q.put(url) q.join() # block until all tasks are done - 
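The green.py script above is the gevent worker-pool pattern in miniature: monkey-patch the socket module so blocking I/O yields, spawn a fixed number of greenlet workers, and feed URLs through a JoinableQueue so q.join() can block until every item is processed. A minimal self-contained sketch of the same pattern follows; the two feed URLs are placeholders, and everything else mirrors calls green.py itself makes:

from gevent import monkey

monkey.patch_socket()  # blocking socket I/O now yields to other greenlets

import urllib.request

import gevent
from gevent import queue

q = queue.JoinableQueue()


def worker():
    while True:
        url = q.get()
        try:
            data = urllib.request.urlopen(url).read()  # cooperative after the patch
            print("fetched %s bytes from %s" % (len(data), url))
        finally:
            q.task_done()  # count the item even if the fetch raised


for _ in range(5):  # a pool of five workers, as in green.py
    gevent.spawn(worker)

for url in ["http://example.com/a.xml", "http://example.com/b.xml"]:  # placeholders
    q.put(url)

q.join()  # returns once every queued URL has been processed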
- diff --git a/utils/archive/knight.py b/utils/archive/knight.py index d8b4371239..f35db12b64 100644 --- a/utils/archive/knight.py +++ b/utils/archive/knight.py @@ -1,15 +1,15 @@ # Screen scrapes the Knight News Challenge entries (all 64 pages of them) # and counts the number of votes/hearts for each entry. Then displays them # in rank order. -# +# # This script runs in about 20 seconds. import requests from BeautifulSoup import BeautifulSoup # Winners found on http://newschallenge.tumblr.com/post/20962258701/knight-news-challenge-on-networks-moving-to-the-next: -# -# $('.posts .MsoNormal > span').find('a[href^="http://newschallenge.tumblr.com/post"]').map(function() { +# +# $('.posts .MsoNormal > span').find('a[href^="http://newschallenge.tumblr.com/post"]').map(function() { # return $(this).attr('href'); # }); @@ -70,7 +70,9 @@ "http://newschallenge.tumblr.com/post/19493920734/get-to-the-source", "http://newschallenge.tumblr.com/post/19480128205/farm-to-table-school-lunch", "http://newschallenge.tumblr.com/post/19477700441/partisans-org", - "http://newschallenge.tumblr.com/post/19345505702/protecting-journalists-and-engaging-communities"] + "http://newschallenge.tumblr.com/post/19345505702/protecting-journalists-and-engaging-communities", +] + def find_entries(): page = 1 @@ -79,73 +81,85 @@ def find_entries(): while True: print(" ---> Found %s entries so far. Now on page: %s" % (len(entries), page)) - + knight_url = "http://newschallenge.tumblr.com/page/%s" % (page) html = requests.get(knight_url).content soup = BeautifulSoup(html) postboxes = soup.findAll("div", "postbox") - + # Done if only sticky entry is left. if len(postboxes) <= 1: break page += 1 - + # 15 entries per page, plus a sticky throwaway entry for entry in postboxes: - if 'stickyPost' in entry.get('class'): continue - + if "stickyPost" in entry.get("class"): + continue + total_entry_count += 1 likes = entry.find("", "home-likes") if likes and likes.text: likes = int(likes.text) else: likes = 0 - + comments = entry.find("", "home-comments") if comments and comments.text: comments = int(comments.text) else: comments = 0 - + title = entry.find("h2") if title: title = title.text - - url = entry.find('a', "home-view") + + url = entry.find("a", "home-view") if url: - url = url.get('href') - + url = url.get("href") + # Only record active entries if comments or likes: - entries.append({ - 'likes': likes, - 'comments': comments, - 'title': title, - 'url': url, - }) + entries.append( + { + "likes": likes, + "comments": comments, + "title": title, + "url": url, + } + ) # time.sleep(random.randint(0, 2)) - - entries.sort(key=lambda e: e['comments'] + e['likes']) + + entries.sort(key=lambda e: e["comments"] + e["likes"]) entries.reverse() active_entry_count = len(entries) - + found_entries = [] winner_count = 0 for i, entry in enumerate(entries): - is_winner = entry['url'] in winners - if is_winner: winner_count += 1 - print(" * %s#%s: %s likes - [%s](%s)%s" % ( - "**" if is_winner else "", - i + 1, - entry['likes'], entry['title'], - entry['url'], - "**" if is_winner else "")) + is_winner = entry["url"] in winners + if is_winner: + winner_count += 1 + print( + " * %s#%s: %s likes - [%s](%s)%s" + % ( + "**" if is_winner else "", + i + 1, + entry["likes"], + entry["title"], + entry["url"], + "**" if is_winner else "", + ) + ) found_entries.append(entry) - - print(" ***> Found %s active entries among %s total applications with %s/%s winners." 
% ( - active_entry_count, total_entry_count, winner_count, len(winners))) + + print( + " ***> Found %s active entries among %s total applications with %s/%s winners." + % (active_entry_count, total_entry_count, winner_count, len(winners)) + ) return found_entries -if __name__ == '__main__': - find_entries() \ No newline at end of file + +if __name__ == "__main__": + find_entries() diff --git a/utils/archive/memcached_status.py b/utils/archive/memcached_status.py index e5be7b37a3..62f4263711 100644 --- a/utils/archive/memcached_status.py +++ b/utils/archive/memcached_status.py @@ -2,46 +2,46 @@ import re import sys from settings import CACHE_BACKEND -#gfranxman + +# gfranxman verbose = False -if not CACHE_BACKEND.startswith( 'memcached://' ): +if not CACHE_BACKEND.startswith("memcached://"): print("you are not configured to use memcched as your django cache backend") else: - m = re.search( r'//(.+:\d+)', CACHE_BACKEND ) - cache_host = m.group(1) + m = re.search(r"//(.+:\d+)", CACHE_BACKEND) + cache_host = m.group(1) - h = memcache._Host( cache_host ) + h = memcache._Host(cache_host) h.connect() - h.send_cmd( 'stats' ) + h.send_cmd("stats") stats = {} - pat = re.compile( r'STAT (\w+) (\w+)' ) + pat = re.compile(r"STAT (\w+) (\w+)") - l = '' ; - while l.find( 'END' ) < 0 : + l = "" + while l.find("END") < 0: l = h.readline() if verbose: print(l) - m = pat.match( l ) - if m : - stats[ m.group(1) ] = m.group(2) - + m = pat.match(l) + if m: + stats[m.group(1)] = m.group(2) h.close_socket() if verbose: print(stats) - items = int( stats[ 'curr_items' ] ) - bytes = int( stats[ 'bytes' ] ) - limit_maxbytes = int( stats[ 'limit_maxbytes' ] ) or bytes - current_conns = int( stats[ 'curr_connections' ] ) + items = int(stats["curr_items"]) + bytes = int(stats["bytes"]) + limit_maxbytes = int(stats["limit_maxbytes"]) or bytes + current_conns = int(stats["curr_connections"]) - print("MemCache status for %s" % ( CACHE_BACKEND )) - print("%d items using %d of %d" % ( items, bytes, limit_maxbytes )) - print("%5.2f%% full" % ( 100.0 * bytes / limit_maxbytes )) - print("%d connections being handled" % ( current_conns )) - print() \ No newline at end of file + print("MemCache status for %s" % (CACHE_BACKEND)) + print("%d items using %d of %d" % (items, bytes, limit_maxbytes)) + print("%5.2f%% full" % (100.0 * bytes / limit_maxbytes)) + print("%d connections being handled" % (current_conns)) + print() diff --git a/utils/db_functions.py b/utils/db_functions.py index 9fdabf8ac4..7a2a7cff77 100644 --- a/utils/db_functions.py +++ b/utils/db_functions.py @@ -3,23 +3,24 @@ PRIMARY_STATE = 1 SECONDARY_STATE = 2 + def mongo_max_replication_lag(connection): try: - status = connection.admin.command('replSetGetStatus') + status = connection.admin.command("replSetGetStatus") except pymongo.errors.OperationFailure: return 0 - - members = status['members'] + + members = status["members"] primary_optime = None oldest_secondary_optime = None for member in members: - member_state = member['state'] - optime = member['optime'] + member_state = member["state"] + optime = member["optime"] if member_state == PRIMARY_STATE: - primary_optime = optime['ts'].time + primary_optime = optime["ts"].time elif member_state == SECONDARY_STATE: - if not oldest_secondary_optime or optime['ts'].time < oldest_secondary_optime: - oldest_secondary_optime = optime['ts'].time + if not oldest_secondary_optime or optime["ts"].time < oldest_secondary_optime: + oldest_secondary_optime = optime["ts"].time if not primary_optime or not 
oldest_secondary_optime: return 0 diff --git a/utils/exception_middleware.py b/utils/exception_middleware.py index de282de65b..5eec95079c 100644 --- a/utils/exception_middleware.py +++ b/utils/exception_middleware.py @@ -3,23 +3,23 @@ import inspect from pprint import pprint + class ConsoleExceptionMiddleware: def process_exception(self, request, exception): exc_info = sys.exc_info() print("######################## Exception #############################") - print(('\n'.join(traceback.format_exception(*(exc_info or sys.exc_info()))))) + print(("\n".join(traceback.format_exception(*(exc_info or sys.exc_info()))))) print("----------------------------------------------------------------") # pprint(inspect.trace()[-1][0].f_locals) print("################################################################") - - #pprint(request) - #print "################################################################" + + # pprint(request) + # print "################################################################" def __init__(self, get_response=None): self.get_response = get_response def __call__(self, request): - response = self.get_response(request) return response diff --git a/utils/facebook_fetcher.py b/utils/facebook_fetcher.py index 356169e85b..798bd683e7 100644 --- a/utils/facebook_fetcher.py +++ b/utils/facebook_fetcher.py @@ -9,216 +9,227 @@ from utils import log as logging from vendor.facebook import GraphAPIError + class FacebookFetcher: - def __init__(self, feed, options=None): self.feed = feed self.options = options or {} - + def fetch(self): page_name = self.extract_page_name() - if not page_name: + if not page_name: return facebook_user = self.facebook_user() if not facebook_user: return - + # If 'video', use video API to get embed: # f.get_object('tastyvegetarian', fields='posts') # f.get_object('1992797300790726', fields='embed_html') - feed = self.fetch_page_feed(facebook_user, page_name, 'name,about,posts,videos,photos') - + feed = self.fetch_page_feed(facebook_user, page_name, "name,about,posts,videos,photos") + data = {} - data['title'] = feed.get('name', "%s on Facebook" % page_name) - data['link'] = feed.get('link', "https://facebook.com/%s" % page_name) - data['description'] = feed.get('about', "%s on Facebook" % page_name) - data['lastBuildDate'] = datetime.datetime.utcnow() - data['generator'] = 'NewsBlur Facebook API Decrapifier - %s' % settings.NEWSBLUR_URL - data['docs'] = None - data['feed_url'] = self.feed.feed_address + data["title"] = feed.get("name", "%s on Facebook" % page_name) + data["link"] = feed.get("link", "https://facebook.com/%s" % page_name) + data["description"] = feed.get("about", "%s on Facebook" % page_name) + data["lastBuildDate"] = datetime.datetime.utcnow() + data["generator"] = "NewsBlur Facebook API Decrapifier - %s" % settings.NEWSBLUR_URL + data["docs"] = None + data["feed_url"] = self.feed.feed_address rss = feedgenerator.Atom1Feed(**data) merged_data = [] - - posts = feed.get('posts', {}).get('data', None) + + posts = feed.get("posts", {}).get("data", None) if posts: for post in posts: story_data = self.page_posts_story(facebook_user, post) if not story_data: continue merged_data.append(story_data) - - videos = feed.get('videos', {}).get('data', None) + + videos = feed.get("videos", {}).get("data", None) if videos: for video in videos: story_data = self.page_video_story(facebook_user, video) if not story_data: continue for seen_data in merged_data: - if story_data['link'] == seen_data['link']: + if story_data["link"] == seen_data["link"]: # Video wins 
over posts (and attachments) - seen_data['description'] = story_data['description'] - seen_data['title'] = story_data['title'] + seen_data["description"] = story_data["description"] + seen_data["title"] = story_data["title"] break - + for story_data in merged_data: rss.add_item(**story_data) - - return rss.writeString('utf-8') - + + return rss.writeString("utf-8") + def extract_page_name(self): page = None try: - page_groups = re.search('facebook.com/(\w+)/?', self.feed.feed_address) + page_groups = re.search("facebook.com/(\w+)/?", self.feed.feed_address) if not page_groups: return page = page_groups.group(1) except IndexError: return - + return page - + def facebook_user(self): facebook_api = None social_services = None - - if self.options.get('requesting_user_id', None): - social_services = MSocialServices.get_user(self.options.get('requesting_user_id')) + + if self.options.get("requesting_user_id", None): + social_services = MSocialServices.get_user(self.options.get("requesting_user_id")) facebook_api = social_services.facebook_api() if not facebook_api: - logging.debug(' ***> [%-30s] ~FRFacebook fetch failed: %s: No facebook API for %s' % - (self.feed.log_title[:30], self.feed.feed_address, self.options)) + logging.debug( + " ***> [%-30s] ~FRFacebook fetch failed: %s: No facebook API for %s" + % (self.feed.log_title[:30], self.feed.feed_address, self.options) + ) return else: usersubs = UserSubscription.objects.filter(feed=self.feed) if not usersubs: - logging.debug(' ***> [%-30s] ~FRFacebook fetch failed: %s: No subscriptions' % - (self.feed.log_title[:30], self.feed.feed_address)) + logging.debug( + " ***> [%-30s] ~FRFacebook fetch failed: %s: No subscriptions" + % (self.feed.log_title[:30], self.feed.feed_address) + ) return for sub in usersubs: social_services = MSocialServices.get_user(sub.user_id) - if not social_services.facebook_uid: + if not social_services.facebook_uid: continue facebook_api = social_services.facebook_api() - if not facebook_api: + if not facebook_api: continue else: break - + if not facebook_api: - logging.debug(' ***> [%-30s] ~FRFacebook fetch failed: %s: No facebook API for %s' % - (self.feed.log_title[:30], self.feed.feed_address, usersubs[0].user.username)) + logging.debug( + " ***> [%-30s] ~FRFacebook fetch failed: %s: No facebook API for %s" + % (self.feed.log_title[:30], self.feed.feed_address, usersubs[0].user.username) + ) return - + return facebook_api - + def fetch_page_feed(self, facebook_user, page, fields): try: stories = facebook_user.get_object(page, fields=fields) except GraphAPIError as e: message = str(e).lower() - if 'session has expired' in message: - logging.debug(' ***> [%-30s] ~FRFacebook page failed/expired, disconnecting facebook: %s: %s' % - (self.feed.log_title[:30], self.feed.feed_address, e)) + if "session has expired" in message: + logging.debug( + " ***> [%-30s] ~FRFacebook page failed/expired, disconnecting facebook: %s: %s" + % (self.feed.log_title[:30], self.feed.feed_address, e) + ) self.feed.save_feed_history(560, "Facebook Error: Expired token") return {} - + if not stories: return {} return stories - + def page_posts_story(self, facebook_user, page_story): categories = set() - if 'message' not in page_story: + if "message" not in page_story: # Probably a story shared on the page's timeline, not a published story return - message = linebreaks(page_story['message']) - created_date = page_story['created_time'] + message = linebreaks(page_story["message"]) + created_date = page_story["created_time"] if 
isinstance(created_date, str):
             created_date = dateutil.parser.parse(created_date)
-        fields = facebook_user.get_object(page_story['id'], fields='permalink_url,link,attachments')
-        permalink = fields.get('link', fields['permalink_url'])
+        fields = facebook_user.get_object(page_story["id"], fields="permalink_url,link,attachments")
+        permalink = fields.get("link", fields["permalink_url"])
 
         attachments_html = ""
-        if fields.get('attachments', None) and fields['attachments']['data']:
-            for attachment in fields['attachments']['data']:
-                if 'media' in attachment:
-                    attachments_html += "<img src=\"%s\" />" % attachment['media']['image']['src']
-                if attachment.get('subattachments', None):
-                    for subattachment in attachment['subattachments']['data']:
-                        attachments_html += "<img src=\"%s\" />" % subattachment['media']['image']['src']
-
+        if fields.get("attachments", None) and fields["attachments"]["data"]:
+            for attachment in fields["attachments"]["data"]:
+                if "media" in attachment:
+                    attachments_html += '<img src="%s" />' % attachment["media"]["image"]["src"]
+                if attachment.get("subattachments", None):
+                    for subattachment in attachment["subattachments"]["data"]:
+                        attachments_html += '<img src="%s" />' % subattachment["media"]["image"]["src"]
+
         content = """
%s
%s
""" % ( message, - attachments_html + attachments_html, ) - + story = { - 'title': message, - 'link': permalink, - 'description': content, - 'categories': list(categories), - 'unique_id': "fb_post:%s" % page_story['id'], - 'pubdate': created_date, + "title": message, + "link": permalink, + "description": content, + "categories": list(categories), + "unique_id": "fb_post:%s" % page_story["id"], + "pubdate": created_date, } - + return story - + def page_video_story(self, facebook_user, page_story): categories = set() - if 'description' not in page_story: + if "description" not in page_story: return - message = linebreaks(page_story['description']) - created_date = page_story['updated_time'] + message = linebreaks(page_story["description"]) + created_date = page_story["updated_time"] if isinstance(created_date, str): created_date = dateutil.parser.parse(created_date) - permalink = facebook_user.get_object(page_story['id'], fields='permalink_url')['permalink_url'] - embed_html = facebook_user.get_object(page_story['id'], fields='embed_html') - - if permalink.startswith('/'): + permalink = facebook_user.get_object(page_story["id"], fields="permalink_url")["permalink_url"] + embed_html = facebook_user.get_object(page_story["id"], fields="embed_html") + + if permalink.startswith("/"): permalink = "https://www.facebook.com%s" % permalink - + content = """
%s
%s
""" % ( message, - embed_html.get('embed_html', '') + embed_html.get("embed_html", ""), ) - + story = { - 'title': page_story.get('story', message), - 'link': permalink, - 'description': content, - 'categories': list(categories), - 'unique_id': "fb_post:%s" % page_story['id'], - 'pubdate': created_date, + "title": page_story.get("story", message), + "link": permalink, + "description": content, + "categories": list(categories), + "unique_id": "fb_post:%s" % page_story["id"], + "pubdate": created_date, } - + return story - + def favicon_url(self): page_name = self.extract_page_name() facebook_user = self.facebook_user() if not facebook_user: - logging.debug(' ***> [%-30s] ~FRFacebook icon failed, disconnecting facebook: %s' % - (self.feed.log_title[:30], self.feed.feed_address)) + logging.debug( + " ***> [%-30s] ~FRFacebook icon failed, disconnecting facebook: %s" + % (self.feed.log_title[:30], self.feed.feed_address) + ) return - + try: - picture_data = facebook_user.get_object(page_name, fields='picture') + picture_data = facebook_user.get_object(page_name, fields="picture") except GraphAPIError as e: message = str(e).lower() - if 'session has expired' in message: - logging.debug(' ***> [%-30s] ~FRFacebook icon failed/expired, disconnecting facebook: %s: %s' % - (self.feed.log_title[:30], self.feed.feed_address, e)) + if "session has expired" in message: + logging.debug( + " ***> [%-30s] ~FRFacebook icon failed/expired, disconnecting facebook: %s: %s" + % (self.feed.log_title[:30], self.feed.feed_address, e) + ) return - if 'picture' in picture_data: - return picture_data['picture']['data']['url'] - \ No newline at end of file + if "picture" in picture_data: + return picture_data["picture"]["data"]["url"] diff --git a/utils/feed_fetcher.py b/utils/feed_fetcher.py index c6b804418e..2955ac203c 100644 --- a/utils/feed_fetcher.py +++ b/utils/feed_fetcher.py @@ -37,8 +37,8 @@ from apps.rss_feeds.page_importer import PageImporter from apps.statistics.models import MAnalyticsFetcher, MStatistics -feedparser.sanitizer._HTMLSanitizer.acceptable_elements.update(['iframe']) -feedparser.sanitizer._HTMLSanitizer.acceptable_elements.update(['text']) +feedparser.sanitizer._HTMLSanitizer.acceptable_elements.update(["iframe"]) +feedparser.sanitizer._HTMLSanitizer.acceptable_elements.update(["text"]) from bs4 import BeautifulSoup from celery.exceptions import SoftTimeLimitExceeded @@ -81,15 +81,15 @@ def fetch(self): """ start = time.time() identity = self.get_identity() - if self.options.get('archive_page', None): - log_msg = '%2s ---> [%-30s] ~FYFetching feed (~FB%d~FY) ~BG~FMarchive page~ST~FY: ~SB%s' % ( + if self.options.get("archive_page", None): + log_msg = "%2s ---> [%-30s] ~FYFetching feed (~FB%d~FY) ~BG~FMarchive page~ST~FY: ~SB%s" % ( identity, self.feed.log_title[:30], self.feed.id, - self.options['archive_page'], + self.options["archive_page"], ) else: - log_msg = '%2s ---> [%-30s] ~FYFetching feed (~FB%d~FY), last update: %s' % ( + log_msg = "%2s ---> [%-30s] ~FYFetching feed (~FB%d~FY), last update: %s" % ( identity, self.feed.log_title[:30], self.feed.id, @@ -101,85 +101,87 @@ def fetch(self): modified = self.feed.last_modified.utctimetuple()[:7] if self.feed.last_modified else None address = self.feed.feed_address - if self.options.get('force') or self.options.get('archive_page', None) or random.random() <= 0.01: - self.options['force'] = True + if self.options.get("force") or self.options.get("archive_page", None) or random.random() <= 0.01: + self.options["force"] = True modified = None 
etag = None - if self.options.get('archive_page', None) == "rfc5005" and self.options.get('archive_page_link', None): - address = self.options['archive_page_link'] - elif self.options.get('archive_page', None): - address = qurl(address, add={self.options['archive_page_key']: self.options['archive_page']}) - elif address.startswith('http'): + if self.options.get("archive_page", None) == "rfc5005" and self.options.get( + "archive_page_link", None + ): + address = self.options["archive_page_link"] + elif self.options.get("archive_page", None): + address = qurl(address, add={self.options["archive_page_key"]: self.options["archive_page"]}) + elif address.startswith("http"): address = qurl(address, add={"_": random.randint(0, 10000)}) - logging.debug(' ---> [%-30s] ~FBForcing fetch: %s' % (self.feed.log_title[:30], address)) + logging.debug(" ---> [%-30s] ~FBForcing fetch: %s" % (self.feed.log_title[:30], address)) elif not self.feed.fetched_once or not self.feed.known_good: modified = None etag = None - if self.options.get('feed_xml'): + if self.options.get("feed_xml"): logging.debug( - ' ---> [%-30s] ~FM~BKFeed has been fat pinged. Ignoring fat: %s' - % (self.feed.log_title[:30], len(self.options.get('feed_xml'))) + " ---> [%-30s] ~FM~BKFeed has been fat pinged. Ignoring fat: %s" + % (self.feed.log_title[:30], len(self.options.get("feed_xml"))) ) - if self.options.get('fpf'): - self.fpf = self.options.get('fpf') + if self.options.get("fpf"): + self.fpf = self.options.get("fpf") logging.debug( - ' ---> [%-30s] ~FM~BKFeed fetched in real-time with fat ping.' % (self.feed.log_title[:30]) + " ---> [%-30s] ~FM~BKFeed fetched in real-time with fat ping." % (self.feed.log_title[:30]) ) return FEED_OK, self.fpf - if 'youtube.com' in address: + if "youtube.com" in address: youtube_feed = self.fetch_youtube() if not youtube_feed: logging.debug( - ' ***> [%-30s] ~FRYouTube fetch failed: %s.' % (self.feed.log_title[:30], address) + " ***> [%-30s] ~FRYouTube fetch failed: %s." % (self.feed.log_title[:30], address) ) return FEED_ERRHTTP, None self.fpf = feedparser.parse(youtube_feed, sanitize_html=False) - elif re.match(r'(https?)?://twitter.com/\w+/?', qurl(address, remove=['_'])): + elif re.match(r"(https?)?://twitter.com/\w+/?", qurl(address, remove=["_"])): twitter_feed = self.fetch_twitter(address) if not twitter_feed: logging.debug( - ' ***> [%-30s] ~FRTwitter fetch failed: %s' % (self.feed.log_title[:30], address) + " ***> [%-30s] ~FRTwitter fetch failed: %s" % (self.feed.log_title[:30], address) ) return FEED_ERRHTTP, None self.fpf = feedparser.parse(twitter_feed) - elif re.match(r'(.*?)facebook.com/\w+/?$', qurl(address, remove=['_'])): + elif re.match(r"(.*?)facebook.com/\w+/?$", qurl(address, remove=["_"])): facebook_feed = self.fetch_facebook() if not facebook_feed: logging.debug( - ' ***> [%-30s] ~FRFacebook fetch failed: %s' % (self.feed.log_title[:30], address) + " ***> [%-30s] ~FRFacebook fetch failed: %s" % (self.feed.log_title[:30], address) ) return FEED_ERRHTTP, None self.fpf = feedparser.parse(facebook_feed) - if not self.fpf and 'json' in address: + if not self.fpf and "json" in address: try: headers = self.feed.fetch_headers() if etag: - headers['If-None-Match'] = etag + headers["If-None-Match"] = etag if modified: # format into an RFC 1123-compliant timestamp. We can't use # time.strftime() since the %a and %b directives can be affected # by the current locale, but RFC 2616 states that dates must be # in English. 
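# A minimal sketch (illustration only): the standard library can produce the
# same locale-independent RFC 1123 date as the hand-rolled formatting below,
# assuming `modified` is the utctimetuple() slice built above — calendar.timegm
# reads only the first six fields, so the truncated tuple is fine:
import calendar
import email.utils
modified_header = email.utils.formatdate(calendar.timegm(modified), usegmt=True)
# e.g. "Sun, 06 Nov 1994 08:49:37 GMT"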
- short_weekdays = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] + short_weekdays = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"] months = [ - 'Jan', - 'Feb', - 'Mar', - 'Apr', - 'May', - 'Jun', - 'Jul', - 'Aug', - 'Sep', - 'Oct', - 'Nov', - 'Dec', + "Jan", + "Feb", + "Mar", + "Apr", + "May", + "Jun", + "Jul", + "Aug", + "Sep", + "Oct", + "Nov", + "Dec", ] - modified_header = '%s, %02d %s %04d %02d:%02d:%02d GMT' % ( + modified_header = "%s, %02d %s %04d %02d:%02d:%02d GMT" % ( short_weekdays[modified[6]], modified[2], months[modified[1] - 1], @@ -188,9 +190,9 @@ def fetch(self): modified[4], modified[5], ) - headers['If-Modified-Since'] = modified_header + headers["If-Modified-Since"] = modified_header if etag or modified: - headers['A-IM'] = 'feed' + headers["A-IM"] = "feed" try: raw_feed = requests.get(address, headers=headers, timeout=15) except (requests.adapters.ConnectionError, TimeoutError): @@ -202,7 +204,10 @@ def fetch(self): % (self.feed.log_title[:30], raw_feed.status_code, raw_feed.headers) ) else: - logging.debug(" ***> [%-30s] ~FRJson feed fetch timed out, trying fake headers: %s" % (self.feed.log_title[:30], address)) + logging.debug( + " ***> [%-30s] ~FRJson feed fetch timed out, trying fake headers: %s" + % (self.feed.log_title[:30], address) + ) raw_feed = requests.get( self.feed.feed_address, headers=self.feed.fetch_headers(fake=True), @@ -210,24 +215,24 @@ def fetch(self): ) json_feed_content_type = any( - json_feed in raw_feed.headers.get('Content-Type', "") - for json_feed in ['application/feed+json', 'application/json'] + json_feed in raw_feed.headers.get("Content-Type", "") + for json_feed in ["application/feed+json", "application/json"] ) if raw_feed.content and json_feed_content_type: # JSON Feed json_feed = self.fetch_json_feed(address, raw_feed) if not json_feed: logging.debug( - ' ***> [%-30s] ~FRJSON fetch failed: %s' % (self.feed.log_title[:30], address) + " ***> [%-30s] ~FRJSON fetch failed: %s" % (self.feed.log_title[:30], address) ) return FEED_ERRHTTP, None self.fpf = feedparser.parse(json_feed) elif raw_feed.content and raw_feed.status_code < 400: response_headers = raw_feed.headers - response_headers['Content-Location'] = raw_feed.url + response_headers["Content-Location"] = raw_feed.url self.raw_feed = smart_str(raw_feed.content) self.fpf = feedparser.parse(self.raw_feed, response_headers=response_headers) - if self.options['verbose']: + if self.options["verbose"]: logging.debug( " ---> [%-30s] ~FBFeed fetch status %s: %s length / %s" % ( @@ -244,7 +249,7 @@ def fetch(self): ) # raise e - if not self.fpf or self.options.get('force_fp', False): + if not self.fpf or self.options.get("force_fp", False): try: self.fpf = feedparser.parse(address, agent=self.feed.user_agent, etag=etag, modified=modified) except ( @@ -260,12 +265,14 @@ def fetch(self): ConnectionResetError, TimeoutError, ) as e: - logging.debug(' ***> [%-30s] ~FRFeed fetch error: %s' % (self.feed.log_title[:30], e)) + logging.debug(" ***> [%-30s] ~FRFeed fetch error: %s" % (self.feed.log_title[:30], e)) pass if not self.fpf: try: - logging.debug(' ***> [%-30s] ~FRTurning off headers: %s' % (self.feed.log_title[:30], address)) + logging.debug( + " ***> [%-30s] ~FRTurning off headers: %s" % (self.feed.log_title[:30], address) + ) self.fpf = feedparser.parse(address, agent=self.feed.user_agent) except ( TypeError, @@ -279,11 +286,11 @@ def fetch(self): http.client.IncompleteRead, ConnectionResetError, ) as e: - logging.debug(' ***> [%-30s] ~FRFetch failed: %s.' 
% (self.feed.log_title[:30], e)) + logging.debug(" ***> [%-30s] ~FRFetch failed: %s." % (self.feed.log_title[:30], e)) return FEED_ERRHTTP, None logging.debug( - ' ---> [%-30s] ~FYFeed fetch in ~FM%.4ss' % (self.feed.log_title[:30], time.time() - start) + " ---> [%-30s] ~FYFeed fetch in ~FM%.4ss" % (self.feed.log_title[:30], time.time() - start) ) return FEED_OK, self.fpf @@ -333,21 +340,21 @@ def process(self): start = time.time() self.refresh_feed() - if not self.options.get('archive_page', None): + if not self.options.get("archive_page", None): feed_status, ret_values = self.verify_feed_integrity() if feed_status and ret_values: return feed_status, ret_values - + self.fpf.entries = self.fpf.entries[:100] - if not self.options.get('archive_page', None): + if not self.options.get("archive_page", None): self.compare_feed_attribute_changes() # Determine if stories aren't valid and replace broken guids guids_seen = set() permalinks_seen = set() for entry in self.fpf.entries: - guids_seen.add(entry.get('guid')) + guids_seen.add(entry.get("guid")) permalinks_seen.add(Feed.get_permalink(entry)) guid_difference = len(guids_seen) != len(self.fpf.entries) single_guid = len(guids_seen) == 1 @@ -363,45 +370,45 @@ def process(self): stories = [] for entry in self.fpf.entries: story = pre_process_story(entry, self.fpf.encoding) - if not story['title'] and not story['story_content']: + if not story["title"] and not story["story_content"]: continue - if self.options.get('archive_page', None) and story.get('published') > day_ago: + if self.options.get("archive_page", None) and story.get("published") > day_ago: # Archive only: Arbitrary but necessary to prevent feeds from creating an unlimited number of stories # because they don't have a guid so it gets auto-generated based on the date, and if the story # is missing a date, then the latest date gets used. So reject anything newer than 24 hours old # when filling out the archive. 
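# Put differently: during an archive backfill, anything published within the
# last 24 hours is rejected, because a feed that omits guids (and possibly
# dates) would otherwise mint a brand-new auto-generated story on every pass.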
# logging.debug(f" ---> [%-30s] ~FBTossing story because it's too new for the archive: ~SB{story}") continue - if story.get('published') < start_date: - start_date = story.get('published') + if story.get("published") < start_date: + start_date = story.get("published") if replace_guids: if replace_permalinks: - new_story_guid = str(story.get('published')) - if self.options['verbose']: + new_story_guid = str(story.get("published")) + if self.options["verbose"]: logging.debug( - ' ---> [%-30s] ~FBReplacing guid (%s) with timestamp: %s' - % (self.feed.log_title[:30], story.get('guid'), new_story_guid) + " ---> [%-30s] ~FBReplacing guid (%s) with timestamp: %s" + % (self.feed.log_title[:30], story.get("guid"), new_story_guid) ) - story['guid'] = new_story_guid + story["guid"] = new_story_guid else: new_story_guid = Feed.get_permalink(story) - if self.options['verbose']: + if self.options["verbose"]: logging.debug( - ' ---> [%-30s] ~FBReplacing guid (%s) with permalink: %s' - % (self.feed.log_title[:30], story.get('guid'), new_story_guid) + " ---> [%-30s] ~FBReplacing guid (%s) with permalink: %s" + % (self.feed.log_title[:30], story.get("guid"), new_story_guid) ) - story['guid'] = new_story_guid - story['story_hash'] = MStory.feed_guid_hash_unsaved(self.feed.pk, story.get('guid')) + story["guid"] = new_story_guid + story["story_hash"] = MStory.feed_guid_hash_unsaved(self.feed.pk, story.get("guid")) stories.append(story) - story_hashes.append(story.get('story_hash')) + story_hashes.append(story.get("story_hash")) original_story_hash_count = len(story_hashes) story_hashes_in_unread_cutoff = self.feed.story_hashes_in_unread_cutoff[:original_story_hash_count] story_hashes.extend(story_hashes_in_unread_cutoff) story_hashes = list(set(story_hashes)) - if self.options['verbose'] or settings.DEBUG: + if self.options["verbose"] or settings.DEBUG: logging.debug( - ' ---> [%-30s] ~FBFound ~SB%s~SN guids, adding ~SB%s~SN/%s guids from db' + " ---> [%-30s] ~FBFound ~SB%s~SN guids, adding ~SB%s~SN/%s guids from db" % ( self.feed.log_title[:30], original_story_hash_count, @@ -427,53 +434,53 @@ def process(self): ret_values = self.feed.add_update_stories( stories, existing_stories, - verbose=self.options['verbose'], - updates_off=self.options['updates_off'], + verbose=self.options["verbose"], + updates_off=self.options["updates_off"], ) # PubSubHubbub - if not self.options.get('archive_page', None): + if not self.options.get("archive_page", None): self.check_feed_for_push() # Push notifications - if ret_values['new'] > 0 and MUserFeedNotification.feed_has_users(self.feed.pk) > 0: - QueueNotifications.delay(self.feed.pk, ret_values['new']) + if ret_values["new"] > 0 and MUserFeedNotification.feed_has_users(self.feed.pk) > 0: + QueueNotifications.delay(self.feed.pk, ret_values["new"]) # All Done logging.debug( - ' ---> [%-30s] ~FYParsed Feed: %snew=%s~SN~FY %sup=%s~SN same=%s%s~SN %serr=%s~SN~FY total=~SB%s' + " ---> [%-30s] ~FYParsed Feed: %snew=%s~SN~FY %sup=%s~SN same=%s%s~SN %serr=%s~SN~FY total=~SB%s" % ( self.feed.log_title[:30], - '~FG~SB' if ret_values['new'] else '', - ret_values['new'], - '~FY~SB' if ret_values['updated'] else '', - ret_values['updated'], - '~SB' if ret_values['same'] else '', - ret_values['same'], - '~FR~SB' if ret_values['error'] else '', - ret_values['error'], + "~FG~SB" if ret_values["new"] else "", + ret_values["new"], + "~FY~SB" if ret_values["updated"] else "", + ret_values["updated"], + "~SB" if ret_values["same"] else "", + ret_values["same"], + "~FR~SB" if ret_values["error"] 
else "", + ret_values["error"], len(self.fpf.entries), ) ) - self.feed.update_all_statistics(has_new_stories=bool(ret_values['new']), force=self.options['force']) + self.feed.update_all_statistics(has_new_stories=bool(ret_values["new"]), force=self.options["force"]) fetch_date = datetime.datetime.now() - if ret_values['new']: - if not getattr(settings, 'TEST_DEBUG', False): + if ret_values["new"]: + if not getattr(settings, "TEST_DEBUG", False): self.feed.trim_feed() self.feed.expire_redis() - if MStatistics.get('raw_feed', None) == self.feed.pk: + if MStatistics.get("raw_feed", None) == self.feed.pk: self.feed.save_raw_feed(self.raw_feed, fetch_date) self.feed.save_feed_history(200, "OK", date=fetch_date) - if self.options['verbose']: + if self.options["verbose"]: logging.debug( - ' ---> [%-30s] ~FBTIME: feed parse in ~FM%.4ss' + " ---> [%-30s] ~FBTIME: feed parse in ~FM%.4ss" % (self.feed.log_title[:30], time.time() - start) ) - if self.options.get('archive_page', None): + if self.options.get("archive_page", None): self.archive_seen_story_hashes.update(story_hashes) - + return FEED_OK, ret_values def verify_feed_integrity(self): @@ -487,12 +494,12 @@ def verify_feed_integrity(self): if not self.feed: return FEED_ERREXC, ret_values - - if hasattr(self.fpf, 'status'): - if self.options['verbose']: + + if hasattr(self.fpf, "status"): + if self.options["verbose"]: if self.fpf.bozo and self.fpf.status != 304: logging.debug( - ' ---> [%-30s] ~FRBOZO exception: %s ~SB(%s entries)' + " ---> [%-30s] ~FRBOZO exception: %s ~SB(%s entries)" % (self.feed.log_title[:30], self.fpf.bozo_exception, len(self.fpf.entries)) ) @@ -504,16 +511,16 @@ def verify_feed_integrity(self): # 302 and 307: Temporary redirect: ignore # 301 and 308: Permanent redirect: save it (after 10 tries) if self.fpf.status == 301 or self.fpf.status == 308: - if self.fpf.href.endswith('feedburner.com/atom.xml'): + if self.fpf.href.endswith("feedburner.com/atom.xml"): return FEED_ERRHTTP, ret_values - redirects, non_redirects = self.feed.count_redirects_in_history('feed') + redirects, non_redirects = self.feed.count_redirects_in_history("feed") self.feed.save_feed_history( self.fpf.status, "HTTP Redirect (%d to go)" % (10 - len(redirects)) ) if len(redirects) >= 10 or len(non_redirects) == 0: address = self.fpf.href - if self.options['force'] and address: - address = qurl(address, remove=['_']) + if self.options["force"] and address: + address = qurl(address, remove=["_"]) self.feed.feed_address = address if not self.feed.known_good: self.feed.fetched_once = True @@ -559,7 +566,7 @@ def verify_feed_integrity(self): if not self.feed.known_good: fixed_feed, feed = self.feed.check_feed_link_for_feed_address() if not fixed_feed: - self.feed.save_feed_history(552, 'Non-xml feed', self.fpf.bozo_exception) + self.feed.save_feed_history(552, "Non-xml feed", self.fpf.bozo_exception) else: self.feed = feed self.feed = self.feed.save() @@ -573,7 +580,7 @@ def verify_feed_integrity(self): if not self.feed.known_good: fixed_feed, feed = self.feed.check_feed_link_for_feed_address() if not fixed_feed: - self.feed.save_feed_history(553, 'Not an RSS feed', self.fpf.bozo_exception) + self.feed.save_feed_history(553, "Not an RSS feed", self.fpf.bozo_exception) else: self.feed = feed self.feed = self.feed.save() @@ -588,69 +595,69 @@ def compare_feed_attribute_changes(self): if not self.feed: logging.debug(f"Missing feed: {self.feed}") return - + original_etag = self.feed.etag - self.feed.etag = self.fpf.get('etag') + self.feed.etag = 
self.fpf.get("etag") if self.feed.etag: self.feed.etag = self.feed.etag[:255] # some times this is None (it never should) *sigh* if self.feed.etag is None: - self.feed.etag = '' + self.feed.etag = "" if self.feed.etag != original_etag: - self.feed.save(update_fields=['etag']) + self.feed.save(update_fields=["etag"]) original_last_modified = self.feed.last_modified - if hasattr(self.fpf, 'modified') and self.fpf.modified: + if hasattr(self.fpf, "modified") and self.fpf.modified: try: self.feed.last_modified = datetime.datetime.strptime( - self.fpf.modified, '%a, %d %b %Y %H:%M:%S %Z' + self.fpf.modified, "%a, %d %b %Y %H:%M:%S %Z" ) except Exception as e: self.feed.last_modified = None logging.debug("Broken mtime %s: %s" % (self.feed.last_modified, e)) pass if self.feed.last_modified != original_last_modified: - self.feed.save(update_fields=['last_modified']) + self.feed.save(update_fields=["last_modified"]) original_title = self.feed.feed_title - if self.fpf.feed.get('title'): - self.feed.feed_title = strip_tags(self.fpf.feed.get('title')) + if self.fpf.feed.get("title"): + self.feed.feed_title = strip_tags(self.fpf.feed.get("title")) if self.feed.feed_title != original_title: - self.feed.save(update_fields=['feed_title']) + self.feed.save(update_fields=["feed_title"]) - tagline = self.fpf.feed.get('tagline', self.feed.data.feed_tagline) + tagline = self.fpf.feed.get("tagline", self.feed.data.feed_tagline) if tagline: original_tagline = self.feed.data.feed_tagline self.feed.data.feed_tagline = smart_str(tagline) if self.feed.data.feed_tagline != original_tagline: - self.feed.data.save(update_fields=['feed_tagline']) + self.feed.data.save(update_fields=["feed_tagline"]) if not self.feed.feed_link_locked: - new_feed_link = self.fpf.feed.get('link') or self.fpf.feed.get('id') or self.feed.feed_link - if self.options['force'] and new_feed_link: - new_feed_link = qurl(new_feed_link, remove=['_']) + new_feed_link = self.fpf.feed.get("link") or self.fpf.feed.get("id") or self.feed.feed_link + if self.options["force"] and new_feed_link: + new_feed_link = qurl(new_feed_link, remove=["_"]) if new_feed_link != self.feed.feed_link: logging.debug( " ---> [%-30s] ~SB~FRFeed's page is different: %s to %s" % (self.feed.log_title[:30], self.feed.feed_link, new_feed_link) ) - redirects, non_redirects = self.feed.count_redirects_in_history('page') + redirects, non_redirects = self.feed.count_redirects_in_history("page") self.feed.save_page_history(301, "HTTP Redirect (%s to go)" % (10 - len(redirects))) if len(redirects) >= 10 or len(non_redirects) == 0: self.feed.feed_link = new_feed_link - self.feed.save(update_fields=['feed_link']) + self.feed.save(update_fields=["feed_link"]) def check_feed_for_push(self): - if not (hasattr(self.fpf, 'feed') and hasattr(self.fpf.feed, 'links') and self.fpf.feed.links): + if not (hasattr(self.fpf, "feed") and hasattr(self.fpf.feed, "links") and self.fpf.feed.links): return - + hub_url = None self_url = self.feed.feed_address for link in self.fpf.feed.links: - if link['rel'] == 'hub' and not hub_url: - hub_url = link['href'] - elif link['rel'] == 'self': - self_url = link['href'] + if link["rel"] == "hub" and not hub_url: + hub_url = link["href"] + elif link["rel"] == "self": + self_url = link["href"] push_expired = False if self.feed.is_push: try: @@ -662,10 +669,10 @@ def check_feed_for_push(self): and self_url and not settings.DEBUG and self.feed.active_subscribers > 0 - and (push_expired or not self.feed.is_push or self.options.get('force')) + and (push_expired or not 
self.feed.is_push or self.options.get("force")) ): logging.debug( - ' ---> [%-30s] ~BB~FW%sSubscribing to PuSH hub: %s' + " ---> [%-30s] ~BB~FW%sSubscribing to PuSH hub: %s" % (self.feed.log_title[:30], "~SKRe-~SN" if push_expired else "", hub_url) ) try: @@ -673,13 +680,11 @@ def check_feed_for_push(self): PushSubscription.objects.subscribe(self_url, feed=self.feed, hub=hub_url) except TimeoutError: logging.debug( - ' ---> [%-30s] ~BB~FW~FRTimed out~FW subscribing to PuSH hub: %s' + " ---> [%-30s] ~BB~FW~FRTimed out~FW subscribing to PuSH hub: %s" % (self.feed.log_title[:30], hub_url) ) elif self.feed.is_push and (self.feed.active_subscribers <= 0 or not hub_url): - logging.debug( - ' ---> [%-30s] ~BB~FWTurning off PuSH, no hub found' % (self.feed.log_title[:30]) - ) + logging.debug(" ---> [%-30s] ~BB~FWTurning off PuSH, no hub found" % (self.feed.log_title[:30])) self.feed.is_push = False self.feed = self.feed.save() @@ -695,11 +700,11 @@ def __init__(self, options): FEED_ERREXC: 0, } self.feed_trans = { - FEED_OK: 'ok', - FEED_SAME: 'unchanged', - FEED_ERRPARSE: 'cant_parse', - FEED_ERRHTTP: 'http_error', - FEED_ERREXC: 'exception', + FEED_OK: "ok", + FEED_SAME: "unchanged", + FEED_ERRPARSE: "cant_parse", + FEED_ERRHTTP: "http_error", + FEED_ERREXC: "exception", } self.feed_keys = sorted(self.feed_trans.keys()) self.time_start = datetime.datetime.utcnow() @@ -713,15 +718,15 @@ def reset_database_connections(self): connection._connection_settings = {} connection._dbs = {} settings.MONGODB = connect(settings.MONGO_DB_NAME, **settings.MONGO_DB) - if 'username' in settings.MONGO_ANALYTICS_DB: + if "username" in settings.MONGO_ANALYTICS_DB: settings.MONGOANALYTICSDB = connect( - db=settings.MONGO_ANALYTICS_DB['name'], + db=settings.MONGO_ANALYTICS_DB["name"], host=f"mongodb://{settings.MONGO_ANALYTICS_DB['username']}:{settings.MONGO_ANALYTICS_DB['password']}@{settings.MONGO_ANALYTICS_DB['host']}/?authSource=admin", alias="nbanalytics", ) else: settings.MONGOANALYTICSDB = connect( - db=settings.MONGO_ANALYTICS_DB['name'], + db=settings.MONGO_ANALYTICS_DB["name"], host=f"mongodb://{settings.MONGO_ANALYTICS_DB['host']}/", alias="nbanalytics", ) @@ -738,15 +743,15 @@ def process_feed_wrapper(self, feed_queue): identity = current_process._identity[0] # If fetching archive pages, come back once the archive scaffolding is built - if self.options.get('archive_page', None): + if self.options.get("archive_page", None): for feed_id in feed_queue: feed = self.refresh_feed(feed_id) try: self.fetch_and_process_archive_pages(feed_id) except SoftTimeLimitExceeded: logging.debug( - ' ---> [%-30s] ~FRTime limit reached while fetching ~FGarchive pages~FR. Made it to ~SB%s' - % (feed.log_title[:30], self.options['archive_page']) + " ---> [%-30s] ~FRTime limit reached while fetching ~FGarchive pages~FR. 
Made it to ~SB%s" + % (feed.log_title[:30], self.options["archive_page"]) ) pass if len(feed_queue) == 1: @@ -771,21 +776,21 @@ def process_feed_wrapper(self, feed_queue): set_user({"id": feed_id, "username": feed.feed_title}) skip = False - if self.options.get('fake'): + if self.options.get("fake"): skip = True weight = "-" quick = "-" rand = "-" elif ( - self.options.get('quick') - and not self.options['force'] + self.options.get("quick") + and not self.options["force"] and feed.known_good and feed.fetched_once and not feed.is_push ): weight = feed.stories_last_month * feed.num_subscribers random_weight = random.randint(1, max(weight, 1)) - quick = float(self.options.get('quick', 0)) + quick = float(self.options.get("quick", 0)) rand = random.random() if random_weight < 1000 and rand < quick: skip = True @@ -796,7 +801,7 @@ def process_feed_wrapper(self, feed_queue): rand = "-" if skip: logging.debug( - ' ---> [%-30s] ~BGFaking fetch, skipping (%s/month, %s subs, %s < %s)...' + " ---> [%-30s] ~BGFaking fetch, skipping (%s/month, %s subs, %s < %s)..." % (feed.log_title[:30], weight, feed.num_subscribers, rand, quick) ) continue @@ -807,74 +812,74 @@ def process_feed_wrapper(self, feed_queue): feed_fetch_duration = time.time() - start_duration raw_feed = ffeed.raw_feed - if fetched_feed and (ret_feed == FEED_OK or self.options['force']): + if fetched_feed and (ret_feed == FEED_OK or self.options["force"]): pfeed = ProcessFeed(feed_id, fetched_feed, self.options, raw_feed=raw_feed) ret_feed, ret_entries = pfeed.process() feed = pfeed.feed feed_process_duration = time.time() - start_duration - if (ret_entries and ret_entries['new']) or self.options['force']: + if (ret_entries and ret_entries["new"]) or self.options["force"]: start = time.time() if not feed.known_good or not feed.fetched_once: feed.known_good = True feed.fetched_once = True feed = feed.save() - if self.options['force'] or random.random() <= 0.02: + if self.options["force"] or random.random() <= 0.02: logging.debug( - ' ---> [%-30s] ~FBPerforming feed cleanup...' % (feed.log_title[:30],) + " ---> [%-30s] ~FBPerforming feed cleanup..." % (feed.log_title[:30],) ) start_cleanup = time.time() feed.count_fs_size_bytes() logging.debug( - ' ---> [%-30s] ~FBDone with feed cleanup. Took ~SB%.4s~SN sec.' + " ---> [%-30s] ~FBDone with feed cleanup. Took ~SB%.4s~SN sec." % (feed.log_title[:30], time.time() - start_cleanup) ) try: self.count_unreads_for_subscribers(feed) except TimeoutError: logging.debug( - ' ---> [%-30s] Unread count took too long...' % (feed.log_title[:30],) + " ---> [%-30s] Unread count took too long..." % (feed.log_title[:30],) ) - if self.options['verbose']: + if self.options["verbose"]: logging.debug( - ' ---> [%-30s] ~FBTIME: unread count in ~FM%.4ss' + " ---> [%-30s] ~FBTIME: unread count in ~FM%.4ss" % (feed.log_title[:30], time.time() - start) ) except (urllib.error.HTTPError, urllib.error.URLError) as e: logging.debug( - ' ---> [%-30s] ~FRFeed throws HTTP error: ~SB%s' % (str(feed_id)[:30], e.reason) + " ---> [%-30s] ~FRFeed throws HTTP error: ~SB%s" % (str(feed_id)[:30], e.reason) ) feed_code = 404 feed.save_feed_history(feed_code, str(e.reason), e) fetched_feed = None except Feed.DoesNotExist: - logging.debug(' ---> [%-30s] ~FRFeed is now gone...' % (str(feed_id)[:30])) + logging.debug(" ---> [%-30s] ~FRFeed is now gone..." % (str(feed_id)[:30])) continue except SoftTimeLimitExceeded as e: logging.debug(" ---> [%-30s] ~BR~FWTime limit hit!~SB~FR Moving on to next feed..." 
% feed) ret_feed = FEED_ERREXC fetched_feed = None feed_code = 559 - feed.save_feed_history(feed_code, 'Timeout', e) + feed.save_feed_history(feed_code, "Timeout", e) except TimeoutError as e: - logging.debug(' ---> [%-30s] ~FRFeed fetch timed out...' % (feed.log_title[:30])) + logging.debug(" ---> [%-30s] ~FRFeed fetch timed out..." % (feed.log_title[:30])) feed_code = 505 - feed.save_feed_history(feed_code, 'Timeout', e) + feed.save_feed_history(feed_code, "Timeout", e) fetched_feed = None except Exception as e: - logging.debug('[%d] ! -------------------------' % (feed_id,)) + logging.debug("[%d] ! -------------------------" % (feed_id,)) tb = traceback.format_exc() logging.error(tb) - logging.debug('[%d] ! -------------------------' % (feed_id,)) + logging.debug("[%d] ! -------------------------" % (feed_id,)) ret_feed = FEED_ERREXC - feed = Feed.get_by_id(getattr(feed, 'pk', feed_id)) + feed = Feed.get_by_id(getattr(feed, "pk", feed_id)) if not feed: continue feed.save_feed_history(500, "Error", tb) feed_code = 500 fetched_feed = None # mail_feed_error_to_admin(feed, e, local_vars=locals()) - if not settings.DEBUG and hasattr(settings, 'SENTRY_DSN') and settings.SENTRY_DSN: + if not settings.DEBUG and hasattr(settings, "SENTRY_DSN") and settings.SENTRY_DSN: capture_exception(e) flush() @@ -897,7 +902,7 @@ def process_feed_wrapper(self, feed_queue): continue if ( - (self.options['force']) + (self.options["force"]) or (random.random() > 0.9) or ( fetched_feed @@ -906,8 +911,7 @@ def process_feed_wrapper(self, feed_queue): and (ret_feed == FEED_OK or (ret_feed == FEED_SAME and feed.stories_last_month > 10)) ) ): - - logging.debug(' ---> [%-30s] ~FYFetching page: %s' % (feed.log_title[:30], feed.feed_link)) + logging.debug(" ---> [%-30s] ~FYFetching page: %s" % (feed.log_title[:30], feed.feed_link)) page_importer = PageImporter(feed) try: page_data = page_importer.fetch_page() @@ -917,27 +921,27 @@ def process_feed_wrapper(self, feed_queue): " ---> [%-30s] ~BR~FWTime limit hit!~SB~FR Moving on to next feed..." % feed ) page_data = None - feed.save_feed_history(557, 'Timeout', e) + feed.save_feed_history(557, "Timeout", e) except TimeoutError: - logging.debug(' ---> [%-30s] ~FRPage fetch timed out...' % (feed.log_title[:30])) + logging.debug(" ---> [%-30s] ~FRPage fetch timed out..." % (feed.log_title[:30])) page_data = None - feed.save_page_history(555, 'Timeout', '') + feed.save_page_history(555, "Timeout", "") except Exception as e: - logging.debug('[%d] ! -------------------------' % (feed_id,)) + logging.debug("[%d] ! -------------------------" % (feed_id,)) tb = traceback.format_exc() logging.error(tb) - logging.debug('[%d] ! -------------------------' % (feed_id,)) + logging.debug("[%d] ! 
-------------------------" % (feed_id,)) feed.save_page_history(550, "Page Error", tb) fetched_feed = None page_data = None # mail_feed_error_to_admin(feed, e, local_vars=locals()) - if not settings.DEBUG and hasattr(settings, 'SENTRY_DSN') and settings.SENTRY_DSN: + if not settings.DEBUG and hasattr(settings, "SENTRY_DSN") and settings.SENTRY_DSN: capture_exception(e) flush() feed = self.refresh_feed(feed.pk) - logging.debug(' ---> [%-30s] ~FYFetching icon: %s' % (feed.log_title[:30], feed.feed_link)) - force = self.options['force'] + logging.debug(" ---> [%-30s] ~FYFetching icon: %s" % (feed.log_title[:30], feed.feed_link)) + force = self.options["force"] if random.random() > 0.99: force = True icon_importer = IconImporter(feed, page_data=page_data, force=force) @@ -948,28 +952,28 @@ def process_feed_wrapper(self, feed_queue): logging.debug( " ---> [%-30s] ~BR~FWTime limit hit!~SB~FR Moving on to next feed..." % feed ) - feed.save_feed_history(558, 'Timeout', e) + feed.save_feed_history(558, "Timeout", e) except TimeoutError: - logging.debug(' ---> [%-30s] ~FRIcon fetch timed out...' % (feed.log_title[:30])) - feed.save_page_history(556, 'Timeout', '') + logging.debug(" ---> [%-30s] ~FRIcon fetch timed out..." % (feed.log_title[:30])) + feed.save_page_history(556, "Timeout", "") except Exception as e: - logging.debug('[%d] ! -------------------------' % (feed_id,)) + logging.debug("[%d] ! -------------------------" % (feed_id,)) tb = traceback.format_exc() logging.error(tb) - logging.debug('[%d] ! -------------------------' % (feed_id,)) + logging.debug("[%d] ! -------------------------" % (feed_id,)) # feed.save_feed_history(560, "Icon Error", tb) # mail_feed_error_to_admin(feed, e, local_vars=locals()) - if not settings.DEBUG and hasattr(settings, 'SENTRY_DSN') and settings.SENTRY_DSN: + if not settings.DEBUG and hasattr(settings, "SENTRY_DSN") and settings.SENTRY_DSN: capture_exception(e) flush() else: logging.debug( - ' ---> [%-30s] ~FBSkipping page fetch: (%s on %s stories) %s' + " ---> [%-30s] ~FBSkipping page fetch: (%s on %s stories) %s" % ( feed.log_title[:30], self.feed_trans[ret_feed], feed.stories_last_month, - '' if feed.has_page else ' [HAS NO PAGE]', + "" if feed.has_page else " [HAS NO PAGE]", ) ) @@ -979,7 +983,7 @@ def process_feed_wrapper(self, feed_queue): feed.last_load_time = round(delta) feed.fetched_once = True try: - feed = feed.save(update_fields=['last_load_time', 'fetched_once']) + feed = feed.save(update_fields=["last_load_time", "fetched_once"]) except IntegrityError: logging.debug( " ***> [%-30s] ~FRIntegrityError on feed: %s" @@ -989,10 +993,10 @@ def process_feed_wrapper(self, feed_queue): ) ) - if ret_entries and ret_entries['new']: - self.publish_to_subscribers(feed, ret_entries['new']) + if ret_entries and ret_entries["new"]: + self.publish_to_subscribers(feed, ret_entries["new"]) - done_msg = '%2s ---> [%-30s] ~FYProcessed in ~FM~SB%.4ss~FY~SN (~FB%s~FY) [%s]' % ( + done_msg = "%2s ---> [%-30s] ~FYProcessed in ~FM~SB%.4ss~FY~SN (~FB%s~FY) [%s]" % ( identity, feed.log_title[:30], delta, @@ -1021,31 +1025,38 @@ def process_feed_wrapper(self, feed_queue): def fetch_and_process_archive_pages(self, feed_id): feed = Feed.get_by_id(feed_id) first_seen_feed = None - original_starting_page = self.options['archive_page'] - + original_starting_page = self.options["archive_page"] + for archive_page_key in ["page", "paged", "rfc5005"]: seen_story_hashes = set() failed_pages = 0 - self.options['archive_page_key'] = archive_page_key + 
self.options["archive_page_key"] = archive_page_key if archive_page_key == "rfc5005": - self.options['archive_page'] = "rfc5005" + self.options["archive_page"] = "rfc5005" link_prev_archive = None if first_seen_feed: - for link in getattr(first_seen_feed.feed, 'links', []): - if link['rel'] == 'prev-archive' or link['rel'] == 'next': - link_prev_archive = link['href'] - logging.debug(' ---> [%-30s] ~FGFeed has ~SBRFC5005~SN links, filling out archive: %s' % (feed.log_title[:30], link_prev_archive)) + for link in getattr(first_seen_feed.feed, "links", []): + if link["rel"] == "prev-archive" or link["rel"] == "next": + link_prev_archive = link["href"] + logging.debug( + " ---> [%-30s] ~FGFeed has ~SBRFC5005~SN links, filling out archive: %s" + % (feed.log_title[:30], link_prev_archive) + ) break else: - logging.debug(' ---> [%-30s] ~FBFeed has no RFC5005 links...' % (feed.log_title[:30])) + logging.debug( + " ---> [%-30s] ~FBFeed has no RFC5005 links..." % (feed.log_title[:30]) + ) else: - self.options['archive_page_link'] = link_prev_archive + self.options["archive_page_link"] = link_prev_archive ffeed = FetchFeed(feed_id, self.options) try: ret_feed, fetched_feed = ffeed.fetch() except TimeoutError: - logging.debug(' ---> [%-30s] ~FRArchive feed fetch timed out...' % (feed.log_title[:30])) + logging.debug( + " ---> [%-30s] ~FRArchive feed fetch timed out..." % (feed.log_title[:30]) + ) # Timeout means don't bother to keep checking... continue @@ -1055,9 +1066,9 @@ def fetch_and_process_archive_pages(self, feed_id): pfeed = ProcessFeed(feed_id, fetched_feed, self.options, raw_feed=raw_feed) if not pfeed.fpf or not pfeed.fpf.entries: continue - for link in getattr(pfeed.fpf.feed, 'links', []): - if link['rel'] == 'prev-archive' or link['rel'] == 'next': - link_prev_archive = link['href'] + for link in getattr(pfeed.fpf.feed, "links", []): + if link["rel"] == "prev-archive" or link["rel"] == "next": + link_prev_archive = link["href"] if not link_prev_archive: continue @@ -1065,16 +1076,21 @@ def fetch_and_process_archive_pages(self, feed_id): while True: if not link_prev_archive: break - if link_prev_archive == self.options.get('archive_page_link', None): - logging.debug(' ---> [%-30s] ~FRNo change in archive page link: %s' % (feed.log_title[:30], link_prev_archive)) - break - self.options['archive_page_link'] = link_prev_archive + if link_prev_archive == self.options.get("archive_page_link", None): + logging.debug( + " ---> [%-30s] ~FRNo change in archive page link: %s" + % (feed.log_title[:30], link_prev_archive) + ) + break + self.options["archive_page_link"] = link_prev_archive link_prev_archive = None ffeed = FetchFeed(feed_id, self.options) try: ret_feed, fetched_feed = ffeed.fetch() except TimeoutError as e: - logging.debug(' ---> [%-30s] ~FRArchive feed fetch timed out...' % (feed.log_title[:30])) + logging.debug( + " ---> [%-30s] ~FRArchive feed fetch timed out..." % (feed.log_title[:30]) + ) # Timeout means don't bother to keep checking... 
break @@ -1083,15 +1099,22 @@ def fetch_and_process_archive_pages(self, feed_id): if fetched_feed and ret_feed == FEED_OK: pfeed = ProcessFeed(feed_id, fetched_feed, self.options, raw_feed=raw_feed) if not pfeed.fpf or not pfeed.fpf.entries: - logging.debug(' ---> [%-30s] ~FRFeed parse failed, no entries' % (feed.log_title[:30])) + logging.debug( + " ---> [%-30s] ~FRFeed parse failed, no entries" % (feed.log_title[:30]) + ) continue - for link in getattr(pfeed.fpf.feed, 'links', []): - if link['rel'] == 'prev-archive' or link['rel'] == 'next': - link_prev_archive = link['href'] - logging.debug(' ---> [%-30s] ~FGFeed still has ~SBRFC5005~SN links, continuing filling out archive: %s' % (feed.log_title[:30], link_prev_archive)) + for link in getattr(pfeed.fpf.feed, "links", []): + if link["rel"] == "prev-archive" or link["rel"] == "next": + link_prev_archive = link["href"] + logging.debug( + " ---> [%-30s] ~FGFeed still has ~SBRFC5005~SN links, continuing filling out archive: %s" + % (feed.log_title[:30], link_prev_archive) + ) break else: - logging.debug(' ---> [%-30s] ~FBFeed has no more RFC5005 links...' % (feed.log_title[:30])) + logging.debug( + " ---> [%-30s] ~FBFeed has no more RFC5005 links..." % (feed.log_title[:30]) + ) break before_story_hashes = len(seen_story_hashes) @@ -1100,23 +1123,30 @@ def fetch_and_process_archive_pages(self, feed_id): after_story_hashes = len(seen_story_hashes) if before_story_hashes == after_story_hashes: - logging.debug(' ---> [%-30s] ~FRNo change in story hashes, but has archive link: %s' % (feed.log_title[:30], link_prev_archive)) - + logging.debug( + " ---> [%-30s] ~FRNo change in story hashes, but has archive link: %s" + % (feed.log_title[:30], link_prev_archive) + ) + failed_color = "~FR" if not link_prev_archive else "" - logging.debug(f" ---> [{feed.log_title[:30]:<30}] ~FGStory hashes found, archive RFC5005 ~SB{link_prev_archive}~SN: ~SB~FG{failed_color}{len(seen_story_hashes):,} stories~SN~FB") + logging.debug( + f" ---> [{feed.log_title[:30]:<30}] ~FGStory hashes found, archive RFC5005 ~SB{link_prev_archive}~SN: ~SB~FG{failed_color}{len(seen_story_hashes):,} stories~SN~FB" + ) else: for page in range(3 if settings.DEBUG and False else 150): if page < original_starting_page: continue - if failed_pages >= 1: + if failed_pages >= 1: break - self.options['archive_page'] = page+1 + self.options["archive_page"] = page + 1 ffeed = FetchFeed(feed_id, self.options) try: ret_feed, fetched_feed = ffeed.fetch() except TimeoutError as e: - logging.debug(' ---> [%-30s] ~FRArchive feed fetch timed out...' % (feed.log_title[:30])) + logging.debug( + " ---> [%-30s] ~FRArchive feed fetch timed out..." % (feed.log_title[:30]) + ) # Timeout means don't bother to keep checking... 
break @@ -1140,12 +1170,14 @@ def fetch_and_process_archive_pages(self, feed_id): else: failed_pages += 1 failed_color = "~FR" if failed_pages > 0 else "" - logging.debug(f" ---> [{feed.log_title[:30]:<30}] ~FGStory hashes found, archive page ~SB{page+1}~SN: ~SB~FG{len(seen_story_hashes):,} stories~SN~FB, {failed_color}{failed_pages} failures") + logging.debug( + f" ---> [{feed.log_title[:30]:<30}] ~FGStory hashes found, archive page ~SB{page+1}~SN: ~SB~FG{len(seen_story_hashes):,} stories~SN~FB, {failed_color}{failed_pages} failures" + ) def publish_to_subscribers(self, feed, new_count): try: r = redis.Redis(connection_pool=settings.REDIS_PUBSUB_POOL) - listeners_count = r.publish(str(feed.pk), 'story:new_count:%s' % new_count) + listeners_count = r.publish(str(feed.pk), "story:new_count:%s" % new_count) if listeners_count: logging.debug( " ---> [%-30s] ~FMPublished to %s subscribers" % (feed.log_title[:30], listeners_count) @@ -1158,7 +1190,7 @@ def count_unreads_for_subscribers(self, feed): user_subs = UserSubscription.objects.filter( feed=feed, active=True, user__profile__last_seen_on__gte=subscriber_expire - ).order_by('-last_read_date') + ).order_by("-last_read_date") if not user_subs.count(): return @@ -1168,16 +1200,16 @@ def count_unreads_for_subscribers(self, feed): sub.needs_unread_recalc = True sub.save() - if self.options['compute_scores']: + if self.options["compute_scores"]: r = redis.Redis(connection_pool=settings.REDIS_STORY_HASH_POOL) stories = MStory.objects(story_feed_id=feed.pk, story_date__gte=feed.unread_cutoff) stories = Feed.format_stories(stories, feed.pk) story_hashes = r.zrangebyscore( - 'zF:%s' % feed.pk, - int(feed.unread_cutoff.strftime('%s')), + "zF:%s" % feed.pk, + int(feed.unread_cutoff.strftime("%s")), int(time.time() + 60 * 60 * 24), ) - missing_story_hashes = set(story_hashes) - set([s['story_hash'] for s in stories]) + missing_story_hashes = set(story_hashes) - set([s["story_hash"] for s in stories]) if missing_story_hashes: missing_stories = MStory.objects( story_feed_id=feed.pk, story_hash__in=missing_story_hashes @@ -1185,7 +1217,7 @@ def count_unreads_for_subscribers(self, feed): missing_stories = Feed.format_stories(missing_stories, feed.pk) stories = missing_stories + stories logging.debug( - ' ---> [%-30s] ~FYFound ~SB~FC%s(of %s)/%s~FY~SN un-secondaried stories while computing scores' + " ---> [%-30s] ~FYFound ~SB~FC%s(of %s)/%s~FY~SN un-secondaried stories while computing scores" % ( feed.log_title[:30], len(missing_stories), @@ -1195,7 +1227,7 @@ def count_unreads_for_subscribers(self, feed): ) cache.set("S:v3:%s" % feed.pk, stories, 60) logging.debug( - ' ---> [%-30s] ~FYComputing scores: ~SB%s stories~SN with ~SB%s subscribers ~SN(%s/%s/%s)' + " ---> [%-30s] ~FYComputing scores: ~SB%s stories~SN with ~SB%s subscribers ~SN(%s/%s/%s)" % ( feed.log_title[:30], len(stories), @@ -1206,16 +1238,16 @@ def count_unreads_for_subscribers(self, feed): ) ) self.calculate_feed_scores_with_stories(user_subs, stories) - elif self.options.get('mongodb_replication_lag'): + elif self.options.get("mongodb_replication_lag"): logging.debug( - ' ---> [%-30s] ~BR~FYSkipping computing scores: ~SB%s seconds~SN of mongodb lag' - % (feed.log_title[:30], self.options.get('mongodb_replication_lag')) + " ---> [%-30s] ~BR~FYSkipping computing scores: ~SB%s seconds~SN of mongodb lag" + % (feed.log_title[:30], self.options.get("mongodb_replication_lag")) ) @timelimit(10) def calculate_feed_scores_with_stories(self, user_subs, stories): for sub in user_subs: - silent = 
False if getattr(self.options, 'verbose', 0) >= 2 else True + silent = False if getattr(self.options, "verbose", 0) >= 2 else True sub.calculate_feed_scores(silent=silent, stories=stories) @@ -1231,7 +1263,7 @@ def add_jobs(self, feeds_queue, feeds_count=1): self.feeds_count = feeds_count def run_jobs(self): - if self.options['single_threaded'] or self.num_threads == 1: + if self.options["single_threaded"] or self.num_threads == 1: return dispatch_workers(self.feeds_queue[0], self.options) else: for i in range(self.num_threads): diff --git a/utils/feed_functions.py b/utils/feed_functions.py index 104fff0107..fbd93035ef 100644 --- a/utils/feed_functions.py +++ b/utils/feed_functions.py @@ -12,9 +12,13 @@ from utils import log as logging -class TimeoutError(Exception): pass +class TimeoutError(Exception): + pass + + def timelimit(timeout): """borrowed from web.py""" + def _1(function): def _2(*args, **kw): class Dispatch(threading.Thread): @@ -23,7 +27,7 @@ def __init__(self): self.result = None self.error = None self.exc_info = None - + self.setDaemon(True) self.start() @@ -33,28 +37,31 @@ def run(self): except BaseException as e: self.error = e self.exc_info = sys.exc_info() + c = Dispatch() dispatch = c c.join(timeout) if c.is_alive(): - raise TimeoutError('took too long') + raise TimeoutError("took too long") if c.error: - tb = ''.join(traceback.format_exception(c.exc_info[0], c.exc_info[1], c.exc_info[2])) + tb = "".join(traceback.format_exception(c.exc_info[0], c.exc_info[1], c.exc_info[2])) logging.debug(f" ***> Traceback timeout error: {tb}") # mail_admins('Error in timeout: %s' % c.exc_info[0], tb) raise c.error return c.result + return _2 + return _1 - + def utf8encode(tstr): - """ Encodes a unicode string in utf-8 - """ + """Encodes a unicode string in utf-8""" msg = "utf8encode is deprecated. Use django.utils.encoding.smart_str instead." warnings.warn(msg, DeprecationWarning) return smart_str(tstr) + # From: http://www.poromenos.org/node/87 def levenshtein_distance(first, second): """Find the Levenshtein distance between two strings.""" @@ -70,24 +77,25 @@ def levenshtein_distance(first, second): second_length = len(second) + 1 distance_matrix = [[0] * second_length for x in range(first_length)] for i in range(first_length): - distance_matrix[i][0] = i + distance_matrix[i][0] = i for j in range(second_length): - distance_matrix[0][j]=j + distance_matrix[0][j] = j for i in range(1, first_length): for j in range(1, second_length): - deletion = distance_matrix[i-1][j] + 1 - insertion = distance_matrix[i][j-1] + 1 - substitution = distance_matrix[i-1][j-1] - if first[i-1] != second[j-1]: + deletion = distance_matrix[i - 1][j] + 1 + insertion = distance_matrix[i][j - 1] + 1 + substitution = distance_matrix[i - 1][j - 1] + if first[i - 1] != second[j - 1]: substitution += 1 distance_matrix[i][j] = min(insertion, deletion, substitution) - return distance_matrix[first_length-1][second_length-1] - + return distance_matrix[first_length - 1][second_length - 1] + + def _do_timesince(d, chunks, now=None): """ Started as a copy of django.util.timesince.timesince, but modified to only output one time unit, and use months as the maximum unit of measure. - + Takes two datetime objects and returns the time between d and now as a nicely formatted string, e.g. "10 minutes". If d occurs after now, then "0 minutes" is returned. 
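# Illustration of the chunk-based rounding described above, via the
# relative_timesince() wrapper defined below (hypothetical values; the largest
# matching chunk wins, so 600 seconds reports as minutes):
import datetime
then = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
relative_timesince(then)  # -> "10 minutes"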
@@ -110,83 +118,86 @@ def _do_timesince(d, chunks, now=None): count = since // seconds if count != 0: break - s = '%(number)d %(type)s' % {'number': count, 'type': name(count)} + s = "%(number)d %(type)s" % {"number": count, "type": name(count)} else: - s = 'just a second' + s = "just a second" return s + def relative_timesince(value): if not value: - return '' + return "" chunks = ( - (60 * 60 * 24, lambda n: ungettext('day', 'days', n)), - (60 * 60, lambda n: ungettext('hour', 'hours', n)), - (60, lambda n: ungettext('minute', 'minutes', n)), - (1, lambda n: ungettext('second', 'seconds', n)), - (0, lambda n: 'just now'), + (60 * 60 * 24, lambda n: ungettext("day", "days", n)), + (60 * 60, lambda n: ungettext("hour", "hours", n)), + (60, lambda n: ungettext("minute", "minutes", n)), + (1, lambda n: ungettext("second", "seconds", n)), + (0, lambda n: "just now"), ) return _do_timesince(value, chunks) - + + def relative_timeuntil(value): if not value: - return '' + return "" chunks = ( - (60 * 60, lambda n: ungettext('hour', 'hours', n)), - (60, lambda n: ungettext('minute', 'minutes', n)) + (60 * 60, lambda n: ungettext("hour", "hours", n)), + (60, lambda n: ungettext("minute", "minutes", n)), ) - + now = datetime.datetime.utcnow() - + return _do_timesince(now, chunks, value) + def seconds_timesince(value): if not value: return 0 now = datetime.datetime.utcnow() delta = now - value - + return delta.days * 24 * 60 * 60 + delta.seconds - + + def format_relative_date(date, future=False): if not date or date < datetime.datetime(2010, 1, 1): return "Soon" - + now = datetime.datetime.utcnow() diff = abs(now - date) if diff < datetime.timedelta(minutes=60): minutes = diff.seconds / 60 - return "%s minute%s %s" % (minutes, - '' if minutes == 1 else 's', - '' if future else 'ago') + return "%s minute%s %s" % (minutes, "" if minutes == 1 else "s", "" if future else "ago") elif datetime.timedelta(minutes=60) <= diff < datetime.timedelta(minutes=90): - return "1 hour %s" % ('' if future else 'ago') + return "1 hour %s" % ("" if future else "ago") elif diff < datetime.timedelta(hours=24): dec = (diff.seconds / 60 + 15) % 60 if dec >= 30: - return "%s.5 hours %s" % ((((diff.seconds / 60) + 15) / 60), - '' if future else 'ago') + return "%s.5 hours %s" % ((((diff.seconds / 60) + 15) / 60), "" if future else "ago") else: - return "%s hours %s" % ((((diff.seconds / 60) + 15) / 60), - '' if future else 'ago') + return "%s hours %s" % ((((diff.seconds / 60) + 15) / 60), "" if future else "ago") else: - days = ((diff.seconds / 60) / 60 / 24) - return "%s day%s %s" % (days, '' if days == 1 else 's', '' if future else 'ago') - -def add_object_to_folder(obj, in_folder, folders, parent='', added=False): - if parent.startswith('river:'): - parent = parent.replace('river:', '') - if in_folder.startswith('river:'): - in_folder = in_folder.replace('river:', '') + days = (diff.seconds / 60) / 60 / 24 + return "%s day%s %s" % (days, "" if days == 1 else "s", "" if future else "ago") + + +def add_object_to_folder(obj, in_folder, folders, parent="", added=False): + if parent.startswith("river:"): + parent = parent.replace("river:", "") + if in_folder.startswith("river:"): + in_folder = in_folder.replace("river:", "") obj_identifier = obj if isinstance(obj, dict): obj_identifier = list(obj.keys())[0] - if ((not in_folder or in_folder == " ") and - not parent and - not isinstance(obj, dict) and - obj_identifier not in folders): + if ( + (not in_folder or in_folder == " ") + and not parent + and not isinstance(obj, dict) + 
and obj_identifier not in folders + ): folders.append(obj) return folders @@ -198,7 +209,7 @@ def add_object_to_folder(obj, in_folder, folders, parent='', added=False): if obj_identifier not in child_folder_names: folders.append(obj) return folders - + for k, v in enumerate(folders): if isinstance(v, dict): for f_k, f_v in list(v.items()): @@ -206,39 +217,42 @@ def add_object_to_folder(obj, in_folder, folders, parent='', added=False): f_v.append(obj) added = True folders[k][f_k] = add_object_to_folder(obj, in_folder, f_v, f_k, added) - - return folders + + return folders + def mail_feed_error_to_admin(feed, e, local_vars=None, subject=None): # Mail the admins with the error if not subject: subject = "Feed update error" exc_info = sys.exc_info() - subject = '%s: %s' % (subject, repr(e)) - message = 'Traceback:\n%s\n\Feed:\n%s\nLocals:\n%s' % ( - '\n'.join(traceback.format_exception(*exc_info)), + subject = "%s: %s" % (subject, repr(e)) + message = "Traceback:\n%s\n\Feed:\n%s\nLocals:\n%s" % ( + "\n".join(traceback.format_exception(*exc_info)), pprint.pformat(feed.__dict__), - pprint.pformat(local_vars) - ) + pprint.pformat(local_vars), + ) logging.debug(f" ***> Feed error, {subject}: {message}") - + + ## {{{ http://code.activestate.com/recipes/576611/ (r11) from operator import itemgetter from heapq import nlargest from itertools import repeat + class Counter(dict): - '''Dict subclass for counting hashable objects. Sometimes called a bag + """Dict subclass for counting hashable objects. Sometimes called a bag or multiset. Elements are stored as dictionary keys and their counts are stored as dictionary values. >>> Counter('zyzygy') Counter({'y': 3, 'z': 2, 'g': 1}) - ''' + """ def __init__(self, iterable=None, **kwds): - '''Create a new, empty Counter object. And if given, count elements + """Create a new, empty Counter object. And if given, count elements from an input iterable. Or, initialize the count from another mapping of elements to their counts. @@ -247,26 +261,26 @@ def __init__(self, iterable=None, **kwds): >>> c = Counter({'a': 4, 'b': 2}) # a new counter from a mapping >>> c = Counter(a=4, b=2) # a new counter from keyword args - ''' + """ self.update(iterable, **kwds) def __missing__(self, key): return 0 def most_common(self, n=None): - '''List the n most common elements and their counts from the most + """List the n most common elements and their counts from the most common to the least. If n is None, then list all element counts. >>> Counter('abracadabra').most_common(3) [('a', 5), ('r', 2), ('b', 2)] - ''' + """ if n is None: return sorted(iter(list(self.items())), key=itemgetter(1), reverse=True) return nlargest(n, iter(list(self.items())), key=itemgetter(1)) def elements(self): - '''Iterator over elements repeating each as many times as its count. + """Iterator over elements repeating each as many times as its count. >>> c = Counter('ABCABC') >>> sorted(c.elements()) @@ -275,7 +289,7 @@ def elements(self): If an element's count has been set to zero or is a negative number, elements() will ignore it. - ''' + """ for elem, count in list(self.items()): for _ in repeat(None, count): yield elem @@ -284,11 +298,10 @@ def elements(self): @classmethod def fromkeys(cls, iterable, v=None): - raise NotImplementedError( - 'Counter.fromkeys() is undefined. Use Counter(iterable) instead.') + raise NotImplementedError("Counter.fromkeys() is undefined. Use Counter(iterable) instead.") def update(self, iterable=None, **kwds): - '''Like dict.update() but add counts instead of replacing them. 
+ """Like dict.update() but add counts instead of replacing them. Source can be an iterable, a dictionary, or another Counter instance. @@ -299,15 +312,15 @@ def update(self, iterable=None, **kwds): >>> c['h'] # four 'h' in which, witch, and watch 4 - ''' + """ if iterable is not None: - if hasattr(iterable, 'iteritems'): + if hasattr(iterable, "iteritems"): if self: self_get = self.get for elem, count in list(iterable.items()): self[elem] = self_get(elem, 0) + count else: - dict.update(self, iterable) # fast path when counter is empty + dict.update(self, iterable) # fast path when counter is empty else: self_get = self.get for elem in iterable: @@ -316,19 +329,19 @@ def update(self, iterable=None, **kwds): self.update(kwds) def copy(self): - 'Like dict.copy() but returns a Counter instance instead of a dict.' + "Like dict.copy() but returns a Counter instance instead of a dict." return Counter(self) def __delitem__(self, elem): - 'Like dict.__delitem__() but does not raise KeyError for missing values.' + "Like dict.__delitem__() but does not raise KeyError for missing values." if elem in self: dict.__delitem__(self, elem) def __repr__(self): if not self: - return '%s()' % self.__class__.__name__ - items = ', '.join(map('%r: %r'.__mod__, self.most_common())) - return '%s({%s})' % (self.__class__.__name__, items) + return "%s()" % self.__class__.__name__ + items = ", ".join(map("%r: %r".__mod__, self.most_common())) + return "%s({%s})" % (self.__class__.__name__, items) # Multiset-style mathematical operations discussed in: # Knuth TAOCP Volume II section 4.6.3 exercise 19 @@ -340,13 +353,13 @@ def __repr__(self): # c += Counter() def __add__(self, other): - '''Add counts from two counters. + """Add counts from two counters. >>> Counter('abbb') + Counter('bcc') Counter({'b': 4, 'c': 2, 'a': 1}) - ''' + """ if not isinstance(other, Counter): return NotImplemented result = Counter() @@ -357,12 +370,12 @@ def __add__(self, other): return result def __sub__(self, other): - ''' Subtract count, but keep only results with positive counts. + """Subtract count, but keep only results with positive counts. >>> Counter('abbbc') - Counter('bccd') Counter({'b': 2, 'a': 1}) - ''' + """ if not isinstance(other, Counter): return NotImplemented result = Counter() @@ -373,12 +386,12 @@ def __sub__(self, other): return result def __or__(self, other): - '''Union is the maximum of value in either of the input counters. + """Union is the maximum of value in either of the input counters. >>> Counter('abbb') | Counter('bcc') Counter({'b': 3, 'c': 2, 'a': 1}) - ''' + """ if not isinstance(other, Counter): return NotImplemented _max = max @@ -390,12 +403,12 @@ def __or__(self, other): return result def __and__(self, other): - ''' Intersection is the minimum of corresponding counts. + """Intersection is the minimum of corresponding counts. 
>>> Counter('abbb') & Counter('bcc') Counter({'b': 1}) - ''' + """ if not isinstance(other, Counter): return NotImplemented _min = min @@ -409,11 +422,13 @@ def __and__(self, other): return result -if __name__ == '__main__': +if __name__ == "__main__": import doctest + print((doctest.testmod())) ## end of http://code.activestate.com/recipes/576611/ }}} + def chunks(l, n): for i in range(0, len(l), n): - yield l[i:i+n] + yield l[i : i + n] diff --git a/utils/feedfinder_forman.py b/utils/feedfinder_forman.py index d543b2ce65..8bccbe1bd4 100755 --- a/utils/feedfinder_forman.py +++ b/utils/feedfinder_forman.py @@ -2,7 +2,6 @@ # -*- coding: utf-8 -*- - __version__ = "0.0.3" try: @@ -30,7 +29,6 @@ def coerce_url(url): class FeedFinder(object): - def __init__(self, user_agent=None): if user_agent is None: user_agent = "NewsBlur Feed Finder" @@ -38,7 +36,9 @@ def __init__(self, user_agent=None): def get_feed(self, url, skip_user_agent=False): try: - r = requests.get(url, headers={"User-Agent": self.user_agent if not skip_user_agent else None}, timeout=15) + r = requests.get( + url, headers={"User-Agent": self.user_agent if not skip_user_agent else None}, timeout=15 + ) except Exception as e: logging.warn("Error while getting '{0}'".format(url)) logging.warn("{0}".format(e)) @@ -51,7 +51,7 @@ def is_feed_data(self, text): data = text.lower() if data and data[:100].count("').replace(''', "'").replace('"', '"').replace('&', '&') + v = ( + v.replace("<", "<") + .replace(">", ">") + .replace("'", "'") + .replace(""", '"') + .replace("&", "&") + ) return v + attrs = [(k.lower(), cleanattr(v)) for k, v in attrs if cleanattr(v)] - attrs = [(k, k in ('rel','type') and v.lower() or v) for k, v in attrs if cleanattr(v)] + attrs = [(k, k in ("rel", "type") and v.lower() or v) for k, v in attrs if cleanattr(v)] return attrs - + def do_base(self, attrs): attrsD = dict(self.normalize_attrs(attrs)) - if 'href' not in attrsD: return - self.baseuri = attrsD['href'] - - def error(self, *a, **kw): pass # we're not picky - + if "href" not in attrsD: + return + self.baseuri = attrsD["href"] + + def error(self, *a, **kw): + pass # we're not picky + + class LinkParser(BaseParser): - FEED_TYPES = ('application/rss+xml', - 'text/xml', - 'application/atom+xml', - 'application/x.atom+xml', - 'application/x-atom+xml') + FEED_TYPES = ( + "application/rss+xml", + "text/xml", + "application/atom+xml", + "application/x.atom+xml", + "application/x-atom+xml", + ) + def do_link(self, attrs): attrsD = dict(self.normalize_attrs(attrs)) - if 'rel' not in attrsD: return - rels = attrsD['rel'].split() - if 'alternate' not in rels: return - if attrsD.get('type') not in self.FEED_TYPES: return - if 'href' not in attrsD: return - self.links.append(urllib.parse.urljoin(self.baseuri, attrsD['href'])) + if "rel" not in attrsD: + return + rels = attrsD["rel"].split() + if "alternate" not in rels: + return + if attrsD.get("type") not in self.FEED_TYPES: + return + if "href" not in attrsD: + return + self.links.append(urllib.parse.urljoin(self.baseuri, attrsD["href"])) + class ALinkParser(BaseParser): def start_a(self, attrs): attrsD = dict(self.normalize_attrs(attrs)) - if 'href' not in attrsD: return - self.links.append(urllib.parse.urljoin(self.baseuri, attrsD['href'])) + if "href" not in attrsD: + return + self.links.append(urllib.parse.urljoin(self.baseuri, attrsD["href"])) + def makeFullURI(uri): - if not uri: return + if not uri: + return uri = uri.strip() - if uri.startswith('feed://'): - uri = 'http://' + uri.split('feed://', 1).pop() - 
-    for x in ['http', 'https']:
-        if uri.startswith('%s://' % x):
+    if uri.startswith("feed://"):
+        uri = "http://" + uri.split("feed://", 1).pop()
+    for x in ["http", "https"]:
+        if uri.startswith("%s://" % x):
             return uri
-    return 'http://%s' % uri
+    return "http://%s" % uri
+
 def getLinks(data, baseuri):
     p = LinkParser(baseuri)
     p.feed(data)
     return p.links
+
 def getLinksLXML(data, baseuri):
     parser = etree.HTMLParser(recover=True)
     tree = etree.parse(StringIO(data), parser)
     links = []
-    for link in tree.findall('.//link'):
-        if link.attrib.get('type') in LinkParser.FEED_TYPES:
-            href = link.attrib['href']
-            if href: links.append(href)
+    for link in tree.findall(".//link"):
+        if link.attrib.get("type") in LinkParser.FEED_TYPES:
+            href = link.attrib["href"]
+            if href:
+                links.append(href)
     return links
+
 def getALinks(data, baseuri):
     p = ALinkParser(baseuri)
     p.feed(data)
     return p.links
+
 def getLocalLinks(links, baseuri):
     found_links = []
-    if not baseuri: return found_links
+    if not baseuri:
+        return found_links
     baseuri = baseuri.lower()
     for l in links:
         try:
@@ -198,28 +234,38 @@ def getLocalLinks(links, baseuri):
             pass
     return found_links
+
 def isFeedLink(link):
-    return link[-4:].lower() in ('.rss', '.rdf', '.xml', '.atom')
+    return link[-4:].lower() in (".rss", ".rdf", ".xml", ".atom")
+
 def isXMLRelatedLink(link):
     link = link.lower()
-    return link.count('rss') + link.count('rdf') + link.count('xml') + link.count('atom')
+    return link.count("rss") + link.count("rdf") + link.count("xml") + link.count("atom")
+
+
-r_brokenRedirect = re.compile('<newLocation[^>]*>(.*?)</newLocation>', re.S)
+r_brokenRedirect = re.compile("<newLocation[^>]*>(.*?)</newLocation>", re.S)
+
+
 def tryBrokenRedirect(data):
-    if '<newLocation' in data:
+    if "<newLocation" in data:
         newuris = r_brokenRedirect.findall(data)
-        if newuris: return newuris[0].strip()
+        if newuris:
+            return newuris[0].strip()
+
+
-def cmp_(a, b): return (a > b) - (a < b)
+def cmp_(a, b):
+    return (a > b) - (a < b)
+
+
 def sortFeeds(feed1Info, feed2Info):
-    return cmp_(feed2Info['headlines_rank'], feed1Info['headlines_rank'])
+    return cmp_(feed2Info["headlines_rank"], feed1Info["headlines_rank"])
+
 def getFeedsFromSyndic8(uri):
     feeds = []
     try:
-        server = xmlrpc.client.Server('http://www.syndic8.com/xmlrpc.php')
+        server = xmlrpc.client.Server("http://www.syndic8.com/xmlrpc.php")
         feedids = server.syndic8.FindFeeds(uri)
-        infolist = server.syndic8.GetFeedInfo(feedids, ['headlines_rank','status','dataurl'])
+        infolist = server.syndic8.GetFeedInfo(feedids, ["headlines_rank", "status", "dataurl"])
         infolist.sort(sortFeeds)
-        feeds = [f['dataurl'] for f in infolist if f['status']=='Syndicated']
-        _debuglog('found %s feeds through Syndic8' % len(feeds))
+        feeds = [f["dataurl"] for f in infolist if f["status"] == "Syndicated"]
+        _debuglog("found %s feeds through Syndic8" % len(feeds))
     except:
         pass
     return feeds
-    
+
+
 def feeds(uri, all=False, querySyndic8=False, _recurs=None):
-    if _recurs is None: _recurs = [uri]
+    if _recurs is None:
+        _recurs = [uri]
     fulluri = makeFullURI(uri)
     try:
         data = _gatekeeper.get(fulluri, check=False)
@@ -261,27 +312,27 @@ def feeds(uri, all=False, querySyndic8=False, _recurs=None):
             _recurs.append(newuri)
             return feeds(newuri, all=all, querySyndic8=querySyndic8, _recurs=_recurs)
     # nope, it's a page, try LINK tags first
-    _debuglog('looking for LINK tags')
+    _debuglog("looking for LINK tags")
     try:
         outfeeds = getLinks(data, fulluri)
     except:
         outfeeds = []
     if not outfeeds:
-        _debuglog('using lxml to look for LINK tags')
+        _debuglog("using lxml to look for LINK tags")
         try:
             outfeeds = getLinksLXML(data, fulluri)
         except:
             outfeeds = []
-    _debuglog('found %s feeds through LINK tags' % len(outfeeds))
+    _debuglog("found %s feeds through LINK tags" % len(outfeeds))
     outfeeds = list(filter(isFeed, outfeeds))
     if all or not outfeeds:
         # no LINK tags, look for regular links that point to feeds
-        _debuglog('no LINK tags, looking at A tags')
+        _debuglog("no LINK tags, looking at A tags")
         try:
             links = getALinks(data, fulluri)
         except:
             links = []
-        _debuglog('no LINK tags, looking at local links')
+        _debuglog("no LINK tags, looking at local links")
         locallinks = getLocalLinks(links, fulluri)
         # look for obvious feed links on the same server
         outfeeds.extend(list(filter(isFeed, list(filter(isFeedLink, locallinks)))))
@@ -295,82 +346,89 @@ def feeds(uri, all=False, querySyndic8=False, _recurs=None):
         # look harder for feed links on another server
         outfeeds.extend(list(filter(isFeed, list(filter(isXMLRelatedLink, links)))))
     if all or not outfeeds:
-        _debuglog('no A tags, guessing')
-        suffixes = [ # filenames used by popular software:
-            'feed/', # obvious
-            'atom.xml', # blogger, TypePad
-            'index.atom', # MT, apparently
-            'index.rdf', # MT
-            'rss.xml', # Dave Winer/Manila
-            'index.xml', # MT
-            'index.rss' # Slash
+        _debuglog("no A tags, guessing")
+        suffixes = [  # filenames used by popular software:
+            "feed/",  # obvious
+            "atom.xml",  # blogger, TypePad
+            "index.atom",  # MT, apparently
+            "index.rdf",  # MT
+            "rss.xml",  # Dave Winer/Manila
+            "index.xml",  # MT
+            "index.rss",  # Slash
         ]
         outfeeds.extend(list(filter(isFeed, [urllib.parse.urljoin(fulluri, x) for x in suffixes])))
     if (all or not outfeeds) and querySyndic8:
         # still no luck, search Syndic8 for feeds (requires xmlrpclib)
-        _debuglog('still no luck, searching Syndic8')
+        _debuglog("still no luck, searching Syndic8")
         outfeeds.extend(getFeedsFromSyndic8(uri))
-    if hasattr(__builtins__, 'set') or 'set' in __builtins__:
+    if hasattr(__builtins__, "set") or "set" in __builtins__:
         outfeeds = list(set(outfeeds))
     return outfeeds
-getFeeds = feeds # backwards-compatibility
+
+
+getFeeds = feeds  # backwards-compatibility
+
 def feed(uri):
-    #todo: give preference to certain feed formats
+    # todo: give preference to certain feed formats
     feedlist = feeds(uri)
     if feedlist:
-        feeds_no_comments = [f for f in feedlist if 'comments' not in f.lower()]
+        feeds_no_comments = [f for f in feedlist if "comments" not in f.lower()]
         if feeds_no_comments:
             return feeds_no_comments[0]
         return feedlist[0]
     else:
         return None
+
 ##### test harness ######
+
 def test():
-    uri = 'http://diveintomark.org/tests/client/autodiscovery/html4-001.html'
+    uri = "http://diveintomark.org/tests/client/autodiscovery/html4-001.html"
     failed = []
     count = 0
     while 1:
         data = _gatekeeper.get(uri)
-        if data.find('Atom autodiscovery test') == -1: break
-        sys.stdout.write('.')
+        if data.find("Atom autodiscovery test") == -1:
+            break
+        sys.stdout.write(".")
         sys.stdout.flush()
         count += 1
         links = getLinks(data, uri)
         if not links:
-            print(('\n*** FAILED ***', uri, 'could not find link'))
+            print(("\n*** FAILED ***", uri, "could not find link"))
             failed.append(uri)
         elif len(links) > 1:
-            print(('\n*** FAILED ***', uri, 'found too many links'))
+            print(("\n*** FAILED ***", uri, "found too many links"))
             failed.append(uri)
         else:
            atomdata = urllib.request.urlopen(links[0]).read()
            if atomdata.find('<link rel="next" href="') == -1:
     second_arg = sys.argv[2] if len(sys.argv) > 2 else "1"
     droplet_index = int(second_arg) if str(second_arg).isnumeric() else 1
     droplet_name = sys.argv[1]
 
     # Use correct Digital Ocean team based on "old"
-    commands = ['ansible-inventory', '--list']
+    commands = ["ansible-inventory", "--list"]
     env = None
     if second_arg == "old":
         env = dict(os.environ, ANSIBLE_CONFIG="ansible.old.cfg")
@@ -26,7 +27,7 @@
         print(" ***> Could not load ansible-inventory!")
 
     hosts = json.loads(hosts)
-    for host, ip_host in
hosts['_meta']['hostvars'].items(): + for host, ip_host in hosts["_meta"]["hostvars"].items(): if host.startswith(droplet_name): - print(ip_host['ansible_host']) + print(ip_host["ansible_host"]) break diff --git a/utils/image_functions.py b/utils/image_functions.py index 0b1f5a4f27..fb5448c734 100644 --- a/utils/image_functions.py +++ b/utils/image_functions.py @@ -7,61 +7,59 @@ from PIL.ExifTags import TAGS from io import BytesIO -PROFILE_PICTURE_SIZES = { - 'fullsize': (256, 256), - 'thumbnail': (64, 64) -} +PROFILE_PICTURE_SIZES = {"fullsize": (256, 256), "thumbnail": (64, 64)} + class ImageOps: - """Module that holds all image operations. Since there's no state, + """Module that holds all image operations. Since there's no state, everything is a classmethod.""" - + @classmethod def resize_image(cls, image_body, size, fit_to_size=False): """Takes a raw image (in image_body) and resizes it to fit given - dimensions. Returns a file-like object in the form of a StringIO. - This must happen in this function because PIL is transforming the + dimensions. Returns a file-like object in the form of a StringIO. + This must happen in this function because PIL is transforming the original as it works.""" - + image_file = BytesIO(image_body) try: image = Image.open(image_file) except IOError: # Invalid image file return False - + # Get the image format early, as we lose it after perform a `thumbnail` or `fit`. format = image.format - + # Check for rotation image = cls.adjust_image_orientation(image) - + if not fit_to_size: image.thumbnail(PROFILE_PICTURE_SIZES[size], Image.ANTIALIAS) else: - image = PILOps.fit(image, PROFILE_PICTURE_SIZES[size], - method=Image.ANTIALIAS, - centering=(0.5, 0.5)) - + image = PILOps.fit( + image, PROFILE_PICTURE_SIZES[size], method=Image.ANTIALIAS, centering=(0.5, 0.5) + ) + output = BytesIO() - if format.lower() == 'jpg': - format = 'jpeg' + if format.lower() == "jpg": + format = "jpeg" image.save(output, format=format, quality=95) - + return output - + @classmethod def adjust_image_orientation(cls, image): """Since the iPhone will store an image on its side but with EXIF data stating that it should be rotated, we need to find that EXIF data and correctly rotate the image before storage.""" - - if hasattr(image, '_getexif'): + + if hasattr(image, "_getexif"): exif = image._getexif() if exif: for tag, value in list(exif.items()): decoded = TAGS.get(tag, tag) - if decoded == 'Orientation': + if decoded == "Orientation": if value == 6: image = image.rotate(-90) if value == 8: @@ -70,14 +68,15 @@ def adjust_image_orientation(cls, image): image = image.rotate(180) break return image - + @classmethod def image_size(cls, url, headers=None): - if not headers: headers = {} + if not headers: + headers = {} req = urllib.request.Request(url, data=None, headers=headers) file = urllib.request.urlopen(req) size = file.headers.get("content-length") - if size: + if size: size = int(size) p = ImageFile.Parser() while True: diff --git a/utils/jennyholzer.py b/utils/jennyholzer.py index f03f8160dc..ce845a6cf2 100644 --- a/utils/jennyholzer.py +++ b/utils/jennyholzer.py @@ -3,7 +3,7 @@ # it is not because they are wrong, just that they may be considered # controversial. I'd rather err on the side of safety, which is contrary # to the trusim: "playing it safe can cause a lot of damage in the long run". -# +# # We'll see where this goes. This is an experiment. 
- Sam, July 6th, 2012 @@ -261,4 +261,4 @@ "you should study as much as possible", # "your actions are pointless if no one notices", # "your oldest fears are the worst ones", -] \ No newline at end of file +] diff --git a/utils/json_fetcher.py b/utils/json_fetcher.py index 08a1befdba..d94371c329 100644 --- a/utils/json_fetcher.py +++ b/utils/json_fetcher.py @@ -5,57 +5,56 @@ from utils import log as logging from utils.json_functions import decode + class JSONFetcher: - def __init__(self, feed, options=None): self.feed = feed self.options = options or {} - + def fetch(self, address, raw_feed): if not address: address = self.feed.feed_address - + json_feed = decode(raw_feed.content) if not json_feed: - logging.debug(' ***> [%-30s] ~FRJSON fetch failed: %s' % - (self.feed.log_title[:30], address)) + logging.debug(" ***> [%-30s] ~FRJSON fetch failed: %s" % (self.feed.log_title[:30], address)) return data = {} - data['title'] = json_feed.get('title', '[Untitled]') - data['link'] = json_feed.get('home_page_url', "") - data['description'] = json_feed.get('title', "") - data['lastBuildDate'] = datetime.datetime.utcnow() - data['generator'] = 'NewsBlur JSON Feed - %s' % settings.NEWSBLUR_URL - data['docs'] = None - data['feed_url'] = json_feed.get('feed_url') - + data["title"] = json_feed.get("title", "[Untitled]") + data["link"] = json_feed.get("home_page_url", "") + data["description"] = json_feed.get("title", "") + data["lastBuildDate"] = datetime.datetime.utcnow() + data["generator"] = "NewsBlur JSON Feed - %s" % settings.NEWSBLUR_URL + data["docs"] = None + data["feed_url"] = json_feed.get("feed_url") + rss = feedgenerator.Atom1Feed(**data) - for item in json_feed.get('items', []): + for item in json_feed.get("items", []): story_data = self.json_feed_story(item) rss.add_item(**story_data) - - return rss.writeString('utf-8') - + + return rss.writeString("utf-8") + def json_feed_story(self, item): date_published = datetime.datetime.now() - pubdate = item.get('date_published', None) + pubdate = item.get("date_published", None) if pubdate: date_published = dateutil.parser.parse(pubdate) - authors = item.get('authors', item.get('author', {})) + authors = item.get("authors", item.get("author", {})) if isinstance(authors, list): - author_name = ', '.join([author.get('name', "") for author in authors]) + author_name = ", ".join([author.get("name", "") for author in authors]) else: - author_name = authors.get('name', "") + author_name = authors.get("name", "") story = { - 'title': item.get('title', ""), - 'link': item.get('external_url', item.get('url', "")), - 'description': item.get('content_html', item.get('content_text', "")), - 'author_name': author_name, - 'categories': item.get('tags', []), - 'unique_id': str(item.get('id', item.get('url', ""))), - 'pubdate': date_published, + "title": item.get("title", ""), + "link": item.get("external_url", item.get("url", "")), + "description": item.get("content_html", item.get("content_text", "")), + "author_name": author_name, + "categories": item.get("tags", []), + "unique_id": str(item.get("id", item.get("url", ""))), + "pubdate": date_published, } - + return story diff --git a/utils/json_functions.py b/utils/json_functions.py index 1b04ed0abf..0cf5fa4e30 100644 --- a/utils/json_functions.py +++ b/utils/json_functions.py @@ -1,4 +1,4 @@ -#-*- coding: utf-8 -*- +# -*- coding: utf-8 -*- from django.db import models from django.utils.functional import Promise from django.utils.encoding import force_text, smart_str @@ -8,6 +8,7 @@ from django.conf import 
settings from django.http import HttpResponse, HttpResponseForbidden, Http404 from django.db.models.query import QuerySet + # from django.utils.deprecation import CallableBool from mongoengine.queryset.queryset import QuerySet as MongoQuerySet from bson.objectid import ObjectId @@ -42,7 +43,7 @@ def _any(data): # Opps, we used to check if it is of type list, but that fails # i.e. in the case of django.newforms.utils.ErrorList, which extends # the type "list". Oh man, that was a dumb mistake! - if hasattr(data, 'canonical'): + if hasattr(data, "canonical"): ret = _any(data.canonical()) elif isinstance(data, list): ret = _list(data) @@ -66,7 +67,7 @@ def _any(data): ret = _model(data) # here we need to encode the string as unicode (otherwise we get utf-16 in the json-response) elif isinstance(data, bytes): - ret = data.decode('utf-8', 'ignore') + ret = data.decode("utf-8", "ignore") elif isinstance(data, str): ret = smart_str(data) elif isinstance(data, Exception): @@ -76,7 +77,7 @@ def _any(data): ret = force_text(data) elif isinstance(data, datetime.datetime) or isinstance(data, datetime.date): ret = str(data) - elif hasattr(data, 'to_json'): + elif hasattr(data, "to_json"): ret = data.to_json() else: ret = data @@ -106,7 +107,7 @@ def _dict(data): ret[str(k)] = _any(v) return ret - if hasattr(data, 'to_json'): + if hasattr(data, "to_json"): data = data.to_json() ret = _any(data) return json.dumps(ret) @@ -132,12 +133,12 @@ def json_response(request, response=None): try: if isinstance(response, dict): response = dict(response) - if 'result' not in response: - response['result'] = 'ok' + if "result" not in response: + response["result"] = "ok" authenticated = request.user.is_authenticated - response['authenticated'] = authenticated + response["authenticated"] = authenticated if authenticated: - response['user_id'] = request.user.pk + response["user_id"] = request.user.pk except KeyboardInterrupt: # Allow keyboard interrupts through for debugging. 
raise @@ -146,28 +147,28 @@ def json_response(request, response=None): except Exception as e: # Mail the admins with the error exc_info = sys.exc_info() - subject = 'JSON view error: %s' % request.path + subject = "JSON view error: %s" % request.path try: request_repr = repr(request) except: - request_repr = 'Request repr() unavailable' + request_repr = "Request repr() unavailable" import traceback - message = 'Traceback:\n%s\n\nRequest:\n%s' % ( - '\n'.join(traceback.format_exception(*exc_info)), + + message = "Traceback:\n%s\n\nRequest:\n%s" % ( + "\n".join(traceback.format_exception(*exc_info)), request_repr, - ) + ) - response = {'result': 'error', - 'text': str(e)} + response = {"result": "error", "text": str(e)} code = 500 if not settings.DEBUG: logging.debug(f" ***> JSON exception {subject}: {message}") - logging.debug('\n'.join(traceback.format_exception(*exc_info))) + logging.debug("\n".join(traceback.format_exception(*exc_info))) else: - print('\n'.join(traceback.format_exception(*exc_info))) + print("\n".join(traceback.format_exception(*exc_info))) json = json_encode(response) - return HttpResponse(json, content_type='application/json; charset=utf-8', status=code) + return HttpResponse(json, content_type="application/json; charset=utf-8", status=code) def main(): @@ -182,5 +183,5 @@ def main(): print(test, json_test) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/utils/log.py b/utils/log.py index 762b1d1acf..de81fb0086 100644 --- a/utils/log.py +++ b/utils/log.py @@ -16,7 +16,7 @@ def emit(self, record): def getlogger(): - logger = logging.getLogger('newsblur') + logger = logging.getLogger("newsblur") return logger @@ -25,7 +25,7 @@ def user(u, msg, request=None, warn_color=True): if not u: return debug(msg) - platform = '------' + platform = "------" time_elapsed = "" if isinstance(u, WSGIRequest) or request: if not request: @@ -33,24 +33,24 @@ def user(u, msg, request=None, warn_color=True): u = request.user platform = extract_user_agent(request) - if hasattr(request, 'start_time'): + if hasattr(request, "start_time"): seconds = time.time() - request.start_time - color = '~FB' + color = "~FB" if warn_color: if seconds >= 5: - color = '~FR' + color = "~FR" elif seconds > 1: - color = '~SB~FK' + color = "~SB~FK" time_elapsed = "[%s%.4ss~SB] " % ( color, seconds, ) is_premium = u.is_authenticated and u.profile.is_premium - premium = '*' if is_premium else '' + premium = "*" if is_premium else "" if is_premium and u.profile.is_archive: premium = "^" username = cipher(str(u)) if settings.CIPHER_USERNAMES else str(u) - info(' ---> [~FB~SN%-6s~SB] %s[%s%s] %s' % (platform, time_elapsed, username, premium, msg)) + info(" ---> [~FB~SN%-6s~SB] %s[%s%s] %s" % (platform, time_elapsed, username, premium, msg)) def cipher(msg): @@ -82,91 +82,97 @@ def error(msg): def colorize(msg): params = { - r'\-\-\->' : '~FB~SB--->~FW', - r'\*\*\*>' : '~FB~SB~BB--->~BT~FW', - r'\[' : '~SB~FB[~SN~FM', - r'AnonymousUser' : '~FBAnonymousUser', - r'\*\]' : r'~SN~FR*~FB~SB]', - r'\^\]' : r'~SN~FR^~FB~SB]', - r'\]' : '~FB~SB]~FW~SN', + r"\-\-\->": "~FB~SB--->~FW", + r"\*\*\*>": "~FB~SB~BB--->~BT~FW", + r"\[": "~SB~FB[~SN~FM", + r"AnonymousUser": "~FBAnonymousUser", + r"\*\]": r"~SN~FR*~FB~SB]", + r"\^\]": r"~SN~FR^~FB~SB]", + r"\]": "~FB~SB]~FW~SN", } colors = { - '~SB' : Style.BRIGHT, - '~SN' : Style.NORMAL, - '~SK' : Style.BLINK, - '~SU' : Style.UNDERLINE, - '~ST' : Style.RESET_ALL, - '~FK': Fore.BLACK, - '~FR': Fore.RED, - '~FG': Fore.GREEN, - '~FY': Fore.YELLOW, - '~FB': 
Fore.BLUE, - '~FM': Fore.MAGENTA, - '~FC': Fore.CYAN, - '~FW': Fore.WHITE, - '~FT': Fore.RESET, - '~BK': Back.BLACK, - '~BR': Back.RED, - '~BG': Back.GREEN, - '~BY': Back.YELLOW, - '~BB': Back.BLUE, - '~BM': Back.MAGENTA, - '~BC': Back.CYAN, - '~BW': Back.WHITE, - '~BT': Back.RESET, + "~SB": Style.BRIGHT, + "~SN": Style.NORMAL, + "~SK": Style.BLINK, + "~SU": Style.UNDERLINE, + "~ST": Style.RESET_ALL, + "~FK": Fore.BLACK, + "~FR": Fore.RED, + "~FG": Fore.GREEN, + "~FY": Fore.YELLOW, + "~FB": Fore.BLUE, + "~FM": Fore.MAGENTA, + "~FC": Fore.CYAN, + "~FW": Fore.WHITE, + "~FT": Fore.RESET, + "~BK": Back.BLACK, + "~BR": Back.RED, + "~BG": Back.GREEN, + "~BY": Back.YELLOW, + "~BB": Back.BLUE, + "~BM": Back.MAGENTA, + "~BC": Back.CYAN, + "~BW": Back.WHITE, + "~BT": Back.RESET, } for k, v in list(params.items()): msg = re.sub(k, v, msg) - msg = msg + '~ST~FW~BT' + msg = msg + "~ST~FW~BT" # msg = re.sub(r'(~[A-Z]{2})', r'%(\1)s', msg) for k, v in list(colors.items()): msg = msg.replace(k, v) return msg - -''' + + +""" This module generates ANSI character codes to printing colors to terminals. See: http://en.wikipedia.org/wiki/ANSI_escape_code -''' +""" + +COLOR_ESC = "\033[" -COLOR_ESC = '\033[' class AnsiCodes(object): def __init__(self, codes): for name in dir(codes): - if not name.startswith('_'): + if not name.startswith("_"): value = getattr(codes, name) - setattr(self, name, COLOR_ESC + str(value) + 'm') + setattr(self, name, COLOR_ESC + str(value) + "m") + class AnsiFore: - BLACK = 30 - RED = 31 - GREEN = 32 - YELLOW = 33 - BLUE = 34 + BLACK = 30 + RED = 31 + GREEN = 32 + YELLOW = 33 + BLUE = 34 MAGENTA = 35 - CYAN = 36 - WHITE = 37 - RESET = 39 + CYAN = 36 + WHITE = 37 + RESET = 39 + class AnsiBack: - BLACK = 40 - RED = 41 - GREEN = 42 - YELLOW = 43 - BLUE = 44 + BLACK = 40 + RED = 41 + GREEN = 42 + YELLOW = 43 + BLUE = 44 MAGENTA = 45 - CYAN = 46 - WHITE = 47 - RESET = 49 + CYAN = 46 + WHITE = 47 + RESET = 49 + class AnsiStyle: - BRIGHT = 1 - DIM = 2 + BRIGHT = 1 + DIM = 2 UNDERLINE = 4 - BLINK = 5 - NORMAL = 22 + BLINK = 5 + NORMAL = 22 RESET_ALL = 0 + Fore = AnsiCodes(AnsiFore) Back = AnsiCodes(AnsiBack) Style = AnsiCodes(AnsiStyle) diff --git a/utils/management_functions.py b/utils/management_functions.py index 914f04e63d..e49c231f49 100644 --- a/utils/management_functions.py +++ b/utils/management_functions.py @@ -1,17 +1,18 @@ import os import errno + def daemonize(): """ Detach from the terminal and continue as a daemon. """ # swiped from twisted/scripts/twistd.py # See http://www.erlenstar.demon.co.uk/unix/faq_toc.html#TOC16 - if os.fork(): # launch child and... - os._exit(0) # kill off parent + if os.fork(): # launch child and... + os._exit(0) # kill off parent os.setsid() - if os.fork(): # launch child and... - os._exit(0) # kill off parent again. + if os.fork(): # launch child and... + os._exit(0) # kill off parent again. 
os.umask(0o77) null = os.open("/dev/null", os.O_RDWR) for i in range(3): @@ -20,4 +21,4 @@ def daemonize(): except OSError as e: if e.errno != errno.EBADF: raise - os.close(null) \ No newline at end of file + os.close(null) diff --git a/utils/mongo_command_monitor.py b/utils/mongo_command_monitor.py index 84b677a77a..108fcfe99e 100644 --- a/utils/mongo_command_monitor.py +++ b/utils/mongo_command_monitor.py @@ -3,8 +3,8 @@ from django.conf import settings from django.db import connection -class MongoCommandLogger(monitoring.CommandListener): +class MongoCommandLogger(monitoring.CommandListener): def __init__(self): self.seen_request_ids = dict() @@ -24,13 +24,13 @@ def succeeded(self, event): op = event.command_name collection = command_dict[op] - command_filter = command_dict.get('filter', None) - command_documents = command_dict.get('documents', None) - command_indexes = command_dict.get('indexes', None) - command_insert = command_dict.get('updates', None) - command_update = command_dict.get('updates', None) - command_sort = command_dict.get('sort', None) - command_get_more = command_dict.get('getMore', None) + command_filter = command_dict.get("filter", None) + command_documents = command_dict.get("documents", None) + command_indexes = command_dict.get("indexes", None) + command_insert = command_dict.get("updates", None) + command_update = command_dict.get("updates", None) + command_sort = command_dict.get("sort", None) + command_get_more = command_dict.get("getMore", None) if command_sort: command_sort = dict(command_sort) @@ -55,19 +55,17 @@ def succeeded(self, event): if op == "insert" or op == "update": op = f"~SB{op}" - - message = { - "op": op, - "query": query, - "collection": collection - } - - if not getattr(connection, 'queriesx', False): + + message = {"op": op, "query": query, "collection": collection} + + if not getattr(connection, "queriesx", False): connection.queriesx = [] - connection.queriesx.append({ - 'mongo': message, - 'time': '%.6f' % (int(event.duration_micros) / 1000000), - }) + connection.queriesx.append( + { + "mongo": message, + "time": "%.6f" % (int(event.duration_micros) / 1000000), + } + ) # logging.info("Command {0.command_name} with request id " # "{0.request_id} on server {0.connection_id} " @@ -75,18 +73,21 @@ def succeeded(self, event): # "microseconds".format(event)) def failed(self, event): - logging.info("Command {0.command_name} with request id " - "{0.request_id} on server {0.connection_id} " - "failed in {0.duration_micros} " - "microseconds".format(event)) + logging.info( + "Command {0.command_name} with request id " + "{0.request_id} on server {0.connection_id} " + "failed in {0.duration_micros} " + "microseconds".format(event) + ) def activated(self, request): - return (settings.DEBUG_QUERIES or - (hasattr(request, 'activated_segments') and - 'db_profiler' in request.activated_segments)) - + return settings.DEBUG_QUERIES or ( + hasattr(request, "activated_segments") and "db_profiler" in request.activated_segments + ) + def process_celery(self, profiler): - if not self.activated(profiler): return + if not self.activated(profiler): + return connection.queriesx = [] diff --git a/utils/mongo_raw_log_middleware.py b/utils/mongo_raw_log_middleware.py index 780e5b39bd..5d23e668d6 100644 --- a/utils/mongo_raw_log_middleware.py +++ b/utils/mongo_raw_log_middleware.py @@ -10,41 +10,47 @@ import pymongo from bson.errors import InvalidBSON -class MongoDumpMiddleware(object): +class MongoDumpMiddleware(object): def __init__(self, get_response=None): 
self.get_response = get_response def activated(self, request): - return (settings.DEBUG_QUERIES or - (hasattr(request, 'activated_segments') and - 'db_profiler' in request.activated_segments)) - + return settings.DEBUG_QUERIES or ( + hasattr(request, "activated_segments") and "db_profiler" in request.activated_segments + ) + def process_view(self, request, callback, callback_args, callback_kwargs): - if not self.activated(request): return + if not self.activated(request): + return self._used_msg_ids = [] - if not getattr(MongoClient, '_logging', False): + if not getattr(MongoClient, "_logging", False): # save old methods - setattr(MongoClient, '_logging', True) - if hasattr(MongoClient, '_send_message_with_response'): + setattr(MongoClient, "_logging", True) + if hasattr(MongoClient, "_send_message_with_response"): connection.queriesx = [] - MongoClient._send_message_with_response = \ - self._instrument(MongoClient._send_message_with_response) - MongoReplicaSetClient._send_message_with_response = \ - self._instrument(MongoReplicaSetClient._send_message_with_response) + MongoClient._send_message_with_response = self._instrument( + MongoClient._send_message_with_response + ) + MongoReplicaSetClient._send_message_with_response = self._instrument( + MongoReplicaSetClient._send_message_with_response + ) return None def process_celery(self, profiler): - if not self.activated(profiler): return + if not self.activated(profiler): + return self._used_msg_ids = [] - if not getattr(MongoClient, '_logging', False): + if not getattr(MongoClient, "_logging", False): # save old methods - setattr(MongoClient, '_logging', True) - if hasattr(MongoClient, '_send_message_with_response'): - MongoClient._send_message_with_response = \ - self._instrument(MongoClient._send_message_with_response) - MongoReplicaSetClient._send_message_with_response = \ - self._instrument(MongoReplicaSetClient._send_message_with_response) + setattr(MongoClient, "_logging", True) + if hasattr(MongoClient, "_send_message_with_response"): + MongoClient._send_message_with_response = self._instrument( + MongoClient._send_message_with_response + ) + MongoReplicaSetClient._send_message_with_response = self._instrument( + MongoReplicaSetClient._send_message_with_response + ) return None def process_response(self, request, response): @@ -56,20 +62,23 @@ def instrumented_method(*args, **kwargs): query = args[1].get_message(False, sock_info, False) message = _mongodb_decode_wire_protocol(query[1]) # message = _mongodb_decode_wire_protocol(args[1][1]) - if not message or message['msg_id'] in self._used_msg_ids: + if not message or message["msg_id"] in self._used_msg_ids: return original_method(*args, **kwargs) - self._used_msg_ids.append(message['msg_id']) + self._used_msg_ids.append(message["msg_id"]) start = time() result = original_method(*args, **kwargs) stop = time() duration = stop - start - if not getattr(connection, 'queriesx', False): + if not getattr(connection, "queriesx", False): connection.queriesx = [] - connection.queriesx.append({ - 'mongo': message, - 'time': '%.6f' % duration, - }) + connection.queriesx.append( + { + "mongo": message, + "time": "%.6f" % duration, + } + ) return result + return instrumented_method def __call__(self, request): @@ -78,34 +87,40 @@ def __call__(self, request): return response + def _mongodb_decode_wire_protocol(message): - """ http://www.mongodb.org/display/DOCS/Mongo+Wire+Protocol """ + """http://www.mongodb.org/display/DOCS/Mongo+Wire+Protocol""" MONGO_OPS = { - 1000: 'msg', - 2001: 'update', - 
2002: 'insert',
-        2003: 'reserved',
-        2004: 'query',
-        2005: 'get_more',
-        2006: 'delete',
-        2007: 'kill_cursors',
+        1000: "msg",
+        2001: "update",
+        2002: "insert",
+        2003: "reserved",
+        2004: "query",
+        2005: "get_more",
+        2006: "delete",
+        2007: "kill_cursors",
     }
-    _, msg_id, _, opcode, _ = struct.unpack('<iiiii', message[:20])
     if percent > 90:
         requests.post(
-            "https://api.mailgun.net/v2/%s/messages" % settings.MAILGUN_SERVER_NAME,
-            auth=("api", settings.MAILGUN_ACCESS_KEY),
-            data={"from": "NewsBlur Disk Monitor: %s " % (hostname, hostname),
-                  "to": [admin_email],
-                  "subject": "%s hit %s%% disk usage!" % (hostname, percent),
-                  "text": "Usage on %s: %s" % (hostname, disk_usage_output)})
+            "https://api.mailgun.net/v2/%s/messages" % settings.MAILGUN_SERVER_NAME,
+            auth=("api", settings.MAILGUN_ACCESS_KEY),
+            data={
+                "from": "NewsBlur Disk Monitor: %s " % (hostname, hostname),
+                "to": [admin_email],
+                "subject": "%s hit %s%% disk usage!" % (hostname, percent),
+                "text": "Usage on %s: %s" % (hostname, disk_usage_output),
+            },
+        )
         print(" ---> Disk usage is NOT fine: %s / %s%% used" % (hostname, percent))
     else:
         print(" ---> Disk usage is fine: %s / %s%% used" % (hostname, percent))
-
-if __name__ == '__main__':
+
+
+if __name__ == "__main__":
     main()
diff --git a/utils/monitor_newsletter_delivery.py b/utils/monitor_newsletter_delivery.py
index 35f30a72fa..c1ef3a57cb 100755
--- a/utils/monitor_newsletter_delivery.py
+++ b/utils/monitor_newsletter_delivery.py
@@ -1,35 +1,44 @@
 #!/usr/local/bin/python3
 import sys
-sys.path.append('/srv/newsblur')
+
+sys.path.append("/srv/newsblur")
 import requests
 from newsblur_web import settings
 import socket
 
+
 def main():
     hostname = socket.gethostname()
     admin_email = settings.ADMINS[0][1]
-    r = requests.get("https://api.mailgun.net/v3/newsletters.newsblur.com/stats/total",
-                     auth=("api", settings.MAILGUN_ACCESS_KEY),
-                     params={"event": ["accepted", "delivered", "failed"],
-                             "duration": "2h"})
-    stats = r.json()['stats'][0]
-    delivered = stats['delivered']['total']
-    accepted = stats['delivered']['total']
-    bounced = stats['failed']['permanent']['total'] + stats['failed']['temporary']['total']
+    r = requests.get(
+        "https://api.mailgun.net/v3/newsletters.newsblur.com/stats/total",
+        auth=("api", settings.MAILGUN_ACCESS_KEY),
+        params={"event": ["accepted", "delivered", "failed"], "duration": "2h"},
+    )
+    stats = r.json()["stats"][0]
+    delivered = stats["delivered"]["total"]
+    accepted = stats["delivered"]["total"]
+    bounced = stats["failed"]["permanent"]["total"] + stats["failed"]["temporary"]["total"]
     if bounced / float(delivered) > 0.5:
         requests.post(
-            "https://api.mailgun.net/v2/%s/messages" % settings.MAILGUN_SERVER_NAME,
-            auth=("api", settings.MAILGUN_ACCESS_KEY),
-            data={"from": "NewsBlur Newsletter Monitor: %s " % (hostname, hostname),
-                  "to": [admin_email],
-                  "subject": "%s newsletters bounced (2h): %s/%s accepted/delivered -> %s bounced" % (hostname, accepted, delivered, bounced),
-                  "text": "Newsletters are not being delivered! %s delivered, %s bounced" % (delivered, bounced)})
+            "https://api.mailgun.net/v2/%s/messages" % settings.MAILGUN_SERVER_NAME,
+            auth=("api", settings.MAILGUN_ACCESS_KEY),
+            data={
+                "from": "NewsBlur Newsletter Monitor: %s " % (hostname, hostname),
+                "to": [admin_email],
+                "subject": "%s newsletters bounced (2h): %s/%s accepted/delivered -> %s bounced"
+                % (hostname, accepted, delivered, bounced),
+                "text": "Newsletters are not being delivered! 
%s delivered, %s bounced" + % (delivered, bounced), + }, + ) print(" ---> %s newsletters bounced: %s > %s > %s" % (hostname, accepted, delivered, bounced)) else: print(" ---> %s newsletters OK: %s > %s > %s" % (hostname, accepted, delivered, bounced)) - -if __name__ == '__main__': + + +if __name__ == "__main__": main() diff --git a/utils/monitor_redis_bgsave.py b/utils/monitor_redis_bgsave.py index c70893f3ad..c23769ba14 100755 --- a/utils/monitor_redis_bgsave.py +++ b/utils/monitor_redis_bgsave.py @@ -1,7 +1,8 @@ #!/usr/local/bin/python3 import sys -sys.path.append('/srv/newsblur') + +sys.path.append("/srv/newsblur") import os import datetime @@ -9,25 +10,30 @@ from newsblur_web import settings import socket + def main(): redis_log_path = sys.argv[1] - t = os.popen('stat -c%Y /srv/newsblur/docker/volumes/redis/') - timestamp = t.read().split('\n')[0] + t = os.popen("stat -c%Y /srv/newsblur/docker/volumes/redis/") + timestamp = t.read().split("\n")[0] modified = datetime.datetime.fromtimestamp(int(timestamp)) hostname = socket.gethostname() modified_minutes = datetime.datetime.now() - modified log_tail = os.popen(f"tail -n 100 {redis_log_path}").read() if True: - #if modified < ten_min_ago: + # if modified < ten_min_ago: requests.post( - "https://api.mailgun.net/v2/%s/messages" % settings.MAILGUN_SERVER_NAME, - auth=("api", settings.MAILGUN_ACCESS_KEY), - data={"from": "NewsBlur Redis Monitor: %s " % (hostname, hostname), - "to": [settings.ADMINS[0][1]], - "subject": "%s hasn't bgsave'd redis in %s!" % (hostname, modified_minutes), - "text": "Last modified %s: %s ago\n\n----\n\n%s" % (hostname, modified_minutes, log_tail)}) + "https://api.mailgun.net/v2/%s/messages" % settings.MAILGUN_SERVER_NAME, + auth=("api", settings.MAILGUN_ACCESS_KEY), + data={ + "from": "NewsBlur Redis Monitor: %s " % (hostname, hostname), + "to": [settings.ADMINS[0][1]], + "subject": "%s hasn't bgsave'd redis in %s!" 
% (hostname, modified_minutes), + "text": "Last modified %s: %s ago\n\n----\n\n%s" % (hostname, modified_minutes, log_tail), + }, + ) else: print(" ---> Redis bgsave fine: %s / %s ago" % (hostname, modified_minutes)) - -if __name__ == '__main__': + + +if __name__ == "__main__": main() diff --git a/utils/monitor_task_fetches.py b/utils/monitor_task_fetches.py index 7e0447fd8c..56e4d967e2 100755 --- a/utils/monitor_task_fetches.py +++ b/utils/monitor_task_fetches.py @@ -1,7 +1,8 @@ #!/usr/local/bin/python3 import sys -sys.path.append('/srv/newsblur') + +sys.path.append("/srv/newsblur") import requests from newsblur_web import settings @@ -9,6 +10,7 @@ import redis import pymongo + def main(): hostname = socket.gethostname() admin_email = settings.ADMINS[0][1] @@ -20,33 +22,41 @@ def main(): r = redis.Redis(connection_pool=settings.REDIS_ANALYTICS_POOL) try: - client = pymongo.MongoClient(f"mongodb://{settings.MONGO_DB['username']}:{settings.MONGO_DB['password']}@{settings.MONGO_DB['host']}/?authSource=admin") - feeds_fetched = client.newsblur.statistics.find_one({"key": "feeds_fetched"})['value'] + client = pymongo.MongoClient( + f"mongodb://{settings.MONGO_DB['username']}:{settings.MONGO_DB['password']}@{settings.MONGO_DB['host']}/?authSource=admin" + ) + feeds_fetched = client.newsblur.statistics.find_one({"key": "feeds_fetched"})["value"] redis_task_fetches = int(r.get(monitor_key) or 0) except Exception as e: failed = e - + if feeds_fetched < 5000000 and not failed: if redis_task_fetches > 0 and feeds_fetched < (redis_task_fetches - FETCHES_DROP_AMOUNT): failed = True - # Ignore 0's below, as they simply imply low number, not falling + # Ignore 0's below, as they simply imply low number, not falling # elif redis_task_fetches <= 0: # failed = True if failed: requests.post( - "https://api.mailgun.net/v2/%s/messages" % settings.MAILGUN_SERVER_NAME, - auth=("api", settings.MAILGUN_ACCESS_KEY), - data={"from": "NewsBlur Task Monitor: %s " % (hostname, hostname), - "to": [admin_email], - "subject": "%s feeds fetched falling: %s (from %s)" % (hostname, feeds_fetched, redis_task_fetches), - "text": "Feed fetches are falling: %s (from %s) %s" % (feeds_fetched, redis_task_fetches, failed)}) + "https://api.mailgun.net/v2/%s/messages" % settings.MAILGUN_SERVER_NAME, + auth=("api", settings.MAILGUN_ACCESS_KEY), + data={ + "from": "NewsBlur Task Monitor: %s " % (hostname, hostname), + "to": [admin_email], + "subject": "%s feeds fetched falling: %s (from %s)" + % (hostname, feeds_fetched, redis_task_fetches), + "text": "Feed fetches are falling: %s (from %s) %s" + % (feeds_fetched, redis_task_fetches, failed), + }, + ) r.set(monitor_key, feeds_fetched) - r.expire(monitor_key, 60*60*12) # 3 hours + r.expire(monitor_key, 60 * 60 * 12) # 3 hours print(" ---> Feeds fetched falling! 
%s %s" % (feeds_fetched, failed)) else: print(" ---> Feeds fetched OK: %s" % (feeds_fetched)) - -if __name__ == '__main__': + + +if __name__ == "__main__": main() diff --git a/utils/monitor_work_queue.py b/utils/monitor_work_queue.py index 1c4ba22382..df95cb1f22 100755 --- a/utils/monitor_work_queue.py +++ b/utils/monitor_work_queue.py @@ -1,7 +1,8 @@ #!/usr/local/bin/python3 import sys -sys.path.append('/srv/newsblur') + +sys.path.append("/srv/newsblur") import requests from newsblur_web import settings @@ -9,6 +10,7 @@ import redis import pymongo + def main(): hostname = socket.gethostname() admin_email = settings.ADMINS[0][1] @@ -25,25 +27,30 @@ def main(): redis_work_queue = int(r_monitor.get(monitor_key) or 0) except Exception as e: failed = e - + if work_queue_size > 300 and work_queue_size > (redis_work_queue + QUEUE_DROP_AMOUNT): failed = True if failed: requests.post( - "https://api.mailgun.net/v2/%s/messages" % settings.MAILGUN_SERVER_NAME, - auth=("api", settings.MAILGUN_ACCESS_KEY), - data={"from": "NewsBlur Queue Monitor: %s " % (hostname, hostname), - "to": [admin_email], - "subject": "%s work queue rising: %s (from %s)" % (hostname, work_queue_size, redis_work_queue), - "text": "Work queue is rising: %s (from %s) %s" % (work_queue_size, redis_work_queue, failed)}) + "https://api.mailgun.net/v2/%s/messages" % settings.MAILGUN_SERVER_NAME, + auth=("api", settings.MAILGUN_ACCESS_KEY), + data={ + "from": "NewsBlur Queue Monitor: %s " % (hostname, hostname), + "to": [admin_email], + "subject": "%s work queue rising: %s (from %s)" + % (hostname, work_queue_size, redis_work_queue), + "text": "Work queue is rising: %s (from %s) %s" % (work_queue_size, redis_work_queue, failed), + }, + ) r_monitor.set(monitor_key, work_queue_size) - r_monitor.expire(monitor_key, 60*60*3) # 3 hours + r_monitor.expire(monitor_key, 60 * 60 * 3) # 3 hours print(" ---> Work queue rising! 
%s %s" % (work_queue_size, failed)) else: print(" ---> Work queue OK: %s" % (work_queue_size)) - -if __name__ == '__main__': + + +if __name__ == "__main__": main() diff --git a/utils/munin/base.py b/utils/munin/base.py index 203e24b314..1f663c7058 100644 --- a/utils/munin/base.py +++ b/utils/munin/base.py @@ -1,22 +1,21 @@ import sys -class MuninGraph(object): +class MuninGraph(object): def run(self): cmd_name = None if len(sys.argv) > 1: cmd_name = sys.argv[1] - if cmd_name == 'config': + if cmd_name == "config": self.print_config() - else: + else: metrics = self.calculate_metrics() self.print_metrics(metrics) - + def print_config(self): - for key,value in self.graph_config.items(): - print('%s %s' % (key, value)) + for key, value in self.graph_config.items(): + print("%s %s" % (key, value)) def print_metrics(self, metrics): for key, value in metrics.items(): - print('%s.value %s' % (key, value)) - \ No newline at end of file + print("%s.value %s" % (key, value)) diff --git a/utils/munin/newsblur_app_servers.py b/utils/munin/newsblur_app_servers.py index 96be3dd796..d4b1766ea0 100755 --- a/utils/munin/newsblur_app_servers.py +++ b/utils/munin/newsblur_app_servers.py @@ -2,70 +2,80 @@ from utils.munin.base import MuninGraph import datetime import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" from django.conf import settings class NBMuninGraph(MuninGraph): - @property def graph_config(self): graph = { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur App Server Page Loads', - 'graph_vlabel' : '# of page loads / server', - 'graph_args' : '-l 0', - 'total.label' : 'total', - 'total.draw' : 'LINE1', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur App Server Page Loads", + "graph_vlabel": "# of page loads / server", + "graph_args": "-l 0", + "total.label": "total", + "total.draw": "LINE1", } stats = self.stats - graph.update(dict((("%s.label" % s['_id'].replace('-', ''), s['_id']) for s in stats))) - graph.update(dict((("%s.draw" % s['_id'].replace('-', ''), "AREASTACK") for s in stats))) - graph['graph_order'] = ' '.join(sorted(s['_id'].replace('-', '') for s in stats)) + graph.update(dict((("%s.label" % s["_id"].replace("-", ""), s["_id"]) for s in stats))) + graph.update(dict((("%s.draw" % s["_id"].replace("-", ""), "AREASTACK") for s in stats))) + graph["graph_order"] = " ".join(sorted(s["_id"].replace("-", "") for s in stats)) return graph def calculate_metrics(self): - servers = dict((("%s" % s['_id'].replace('-', ''), s['feeds']) for s in self.stats)) - servers['total'] = self.total[0]['feeds'] + servers = dict((("%s" % s["_id"].replace("-", ""), s["feeds"]) for s in self.stats)) + servers["total"] = self.total[0]["feeds"] return servers - + @property def stats(self): - stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate([{ - "$match": { - "date": { - "$gte": datetime.datetime.now() - datetime.timedelta(minutes=5), + stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate( + [ + { + "$match": { + "date": { + "$gte": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, }, - }, - }, { - "$group": { - "_id" : "$server", - "feeds" : {"$sum": 1}, - }, - }]) - + { + "$group": { + "_id": "$server", + "feeds": {"$sum": 1}, + }, + }, + ] + ) + return list(stats) - + @property def total(self): import datetime from django.conf import settings - - stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate([{ - "$match": { - "date": { - "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + + stats 
= settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate( + [ + { + "$match": { + "date": { + "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, + }, + { + "$group": { + "_id": 1, + "feeds": {"$sum": 1}, + }, }, - }, - }, { - "$group": { - "_id" : 1, - "feeds" : {"$sum": 1}, - }, - }]) - + ] + ) + return list(stats) - -if __name__ == '__main__': + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_app_times.py b/utils/munin/newsblur_app_times.py index 228c9963a7..868014e5cd 100755 --- a/utils/munin/newsblur_app_times.py +++ b/utils/munin/newsblur_app_times.py @@ -1,51 +1,57 @@ #!/srv/newsblur/venv/newsblur3/bin/python from utils.munin.base import MuninGraph -class NBMuninGraph(MuninGraph): +class NBMuninGraph(MuninGraph): @property def graph_config(self): graph = { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur App Server Times', - 'graph_vlabel' : 'Page load time / server', - 'graph_args' : '-l 0', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur App Server Times", + "graph_vlabel": "Page load time / server", + "graph_args": "-l 0", } stats = self.stats - graph['graph_order'] = ' '.join(sorted(s['_id'] for s in stats)) - graph.update(dict((("%s.label" % s['_id'], s['_id']) for s in stats))) - graph.update(dict((("%s.draw" % s['_id'], 'LINE1') for s in stats))) + graph["graph_order"] = " ".join(sorted(s["_id"] for s in stats)) + graph.update(dict((("%s.label" % s["_id"], s["_id"]) for s in stats))) + graph.update(dict((("%s.draw" % s["_id"], "LINE1") for s in stats))) return graph def calculate_metrics(self): - servers = dict((("%s" % s['_id'], s['page_load']) for s in self.stats)) + servers = dict((("%s" % s["_id"], s["page_load"]) for s in self.stats)) return servers - + @property def stats(self): import datetime import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" from django.conf import settings - - stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate([{ - "$match": { - "date": { - "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + + stats = settings.MONGOANALYTICSDB.nbanalytics.page_loads.aggregate( + [ + { + "$match": { + "date": { + "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, + }, + { + "$group": { + "_id": "$server", + "page_load": {"$avg": "$page_load"}, + }, }, - }, - }, { - "$group": { - "_id" : "$server", - "page_load" : {"$avg": "$page_load"}, - }, - }]) - + ] + ) + return list(stats) - -if __name__ == '__main__': + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_classifiers.py b/utils/munin/newsblur_classifiers.py index e06515eb7a..1efbd6e9ae 100755 --- a/utils/munin/newsblur_classifiers.py +++ b/utils/munin/newsblur_classifiers.py @@ -2,34 +2,37 @@ from utils.munin.base import MuninGraph import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" import django + django.setup() -class NBMuninGraph(MuninGraph): +class NBMuninGraph(MuninGraph): @property def graph_config(self): return { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur Classifiers', - 'graph_vlabel' : '# of classifiers', - 'graph_args' : '-l 0', - 'feeds.label': 'feeds', - 'authors.label': 'authors', - 'tags.label': 'tags', - 'titles.label': 'titles', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur Classifiers", + "graph_vlabel": "# of classifiers", + "graph_args": "-l 0", + "feeds.label": "feeds", + "authors.label": "authors", + "tags.label": "tags", + "titles.label": "titles", 
} def calculate_metrics(self): from apps.analyzer.models import MClassifierFeed, MClassifierAuthor, MClassifierTag, MClassifierTitle return { - 'feeds': MClassifierFeed.objects.count(), - 'authors': MClassifierAuthor.objects.count(), - 'tags': MClassifierTag.objects.count(), - 'titles': MClassifierTitle.objects.count(), + "feeds": MClassifierFeed.objects.count(), + "authors": MClassifierAuthor.objects.count(), + "tags": MClassifierTag.objects.count(), + "titles": MClassifierTitle.objects.count(), } -if __name__ == '__main__': + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_dbtimes.py b/utils/munin/newsblur_dbtimes.py index 46a7668d2f..e50be6597c 100755 --- a/utils/munin/newsblur_dbtimes.py +++ b/utils/munin/newsblur_dbtimes.py @@ -1,44 +1,47 @@ #!/srv/newsblur/venv/newsblur3/bin/python from utils.munin.base import MuninGraph import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" import django + django.setup() -class NBMuninGraph(MuninGraph): +class NBMuninGraph(MuninGraph): @property def graph_config(self): return { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur DB Times', - 'graph_vlabel' : 'Database times (seconds)', - 'graph_args' : '-l 0', - 'sql_avg.label' : 'SQL avg times (5m)', - 'sql_avg.draw' : 'LINE1', - 'mongo_avg.label' : 'Mongo avg times (5m)', - 'mongo_avg.draw' : 'LINE1', - 'redis_avg.label' :'Redis avg times (5m)', - 'redis_avg.draw' : 'LINE1', - 'task_sql_avg.label' : 'Task SQL avg times (5m)', - 'task_sql_avg.draw' : 'LINE1', - 'task_mongo_avg.label' : 'Task Mongo avg times (5m)', - 'task_mongo_avg.draw' : 'LINE1', - 'task_redis_avg.label' :'Task Redis avg times (5m)', - 'task_redis_avg.draw' : 'LINE1', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur DB Times", + "graph_vlabel": "Database times (seconds)", + "graph_args": "-l 0", + "sql_avg.label": "SQL avg times (5m)", + "sql_avg.draw": "LINE1", + "mongo_avg.label": "Mongo avg times (5m)", + "mongo_avg.draw": "LINE1", + "redis_avg.label": "Redis avg times (5m)", + "redis_avg.draw": "LINE1", + "task_sql_avg.label": "Task SQL avg times (5m)", + "task_sql_avg.draw": "LINE1", + "task_mongo_avg.label": "Task Mongo avg times (5m)", + "task_mongo_avg.draw": "LINE1", + "task_redis_avg.label": "Task Redis avg times (5m)", + "task_redis_avg.draw": "LINE1", } def calculate_metrics(self): from apps.statistics.models import MStatistics - + return { - 'sql_avg': MStatistics.get('latest_sql_avg'), - 'mongo_avg': MStatistics.get('latest_mongo_avg'), - 'redis_avg': MStatistics.get('latest_redis_avg'), - 'task_sql_avg': MStatistics.get('latest_task_sql_avg'), - 'task_mongo_avg': MStatistics.get('latest_task_mongo_avg'), - 'task_redis_avg': MStatistics.get('latest_task_redis_avg'), + "sql_avg": MStatistics.get("latest_sql_avg"), + "mongo_avg": MStatistics.get("latest_mongo_avg"), + "redis_avg": MStatistics.get("latest_redis_avg"), + "task_sql_avg": MStatistics.get("latest_task_sql_avg"), + "task_mongo_avg": MStatistics.get("latest_task_mongo_avg"), + "task_redis_avg": MStatistics.get("latest_task_redis_avg"), } -if __name__ == '__main__': + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_errors.py b/utils/munin/newsblur_errors.py index 0e1f0d83da..ec3c967e3d 100755 --- a/utils/munin/newsblur_errors.py +++ b/utils/munin/newsblur_errors.py @@ -2,32 +2,36 @@ from utils.munin.base import MuninGraph import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" import django + django.setup() -class 
NBMuninGraph(MuninGraph): +class NBMuninGraph(MuninGraph): @property def graph_config(self): return { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur Fetching History', - 'graph_vlabel' : 'errors', - 'graph_args' : '-l 0', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur Fetching History", + "graph_vlabel": "errors", + "graph_args": "-l 0", # 'feed_errors.label': 'Feed Errors', - 'feed_success.label': 'Feed Success', + "feed_success.label": "Feed Success", # 'page_errors.label': 'Page Errors', # 'page_success.label': 'Page Success', } def calculate_metrics(self): from apps.statistics.models import MStatistics + statistics = MStatistics.all() - + return { - 'feed_success': statistics['feeds_fetched'], + "feed_success": statistics["feeds_fetched"], } -if __name__ == '__main__': + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_feed_counts.py b/utils/munin/newsblur_feed_counts.py index 1ed4ff518c..97d929802a 100755 --- a/utils/munin/newsblur_feed_counts.py +++ b/utils/munin/newsblur_feed_counts.py @@ -2,25 +2,27 @@ from utils.munin.base import MuninGraph import redis import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" import django + django.setup() -class NBMuninGraph(MuninGraph): +class NBMuninGraph(MuninGraph): @property def graph_config(self): return { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur Feed Counts', - 'graph_vlabel' : 'Feeds Feed Counts', - 'graph_args' : '-l 0', - 'scheduled_feeds.label': 'scheduled_feeds', - 'exception_feeds.label': 'exception_feeds', - 'exception_pages.label': 'exception_pages', - 'duplicate_feeds.label': 'duplicate_feeds', - 'active_feeds.label': 'active_feeds', - 'push_feeds.label': 'push_feeds', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur Feed Counts", + "graph_vlabel": "Feeds Feed Counts", + "graph_args": "-l 0", + "scheduled_feeds.label": "scheduled_feeds", + "exception_feeds.label": "exception_feeds", + "exception_pages.label": "exception_pages", + "duplicate_feeds.label": "duplicate_feeds", + "active_feeds.label": "active_feeds", + "push_feeds.label": "push_feeds", } def calculate_metrics(self): @@ -28,42 +30,43 @@ def calculate_metrics(self): from apps.push.models import PushSubscription from django.conf import settings from apps.statistics.models import MStatistics - - exception_feeds = MStatistics.get('munin:exception_feeds') + + exception_feeds = MStatistics.get("munin:exception_feeds") if not exception_feeds: exception_feeds = Feed.objects.filter(has_feed_exception=True).count() - MStatistics.set('munin:exception_feeds', exception_feeds, 60*60*12) + MStatistics.set("munin:exception_feeds", exception_feeds, 60 * 60 * 12) - exception_pages = MStatistics.get('munin:exception_pages') + exception_pages = MStatistics.get("munin:exception_pages") if not exception_pages: exception_pages = Feed.objects.filter(has_page_exception=True).count() - MStatistics.set('munin:exception_pages', exception_pages, 60*60*12) + MStatistics.set("munin:exception_pages", exception_pages, 60 * 60 * 12) - duplicate_feeds = MStatistics.get('munin:duplicate_feeds') + duplicate_feeds = MStatistics.get("munin:duplicate_feeds") if not duplicate_feeds: duplicate_feeds = DuplicateFeed.objects.count() - MStatistics.set('munin:duplicate_feeds', duplicate_feeds, 60*60*12) + MStatistics.set("munin:duplicate_feeds", duplicate_feeds, 60 * 60 * 12) - active_feeds = MStatistics.get('munin:active_feeds') + active_feeds = MStatistics.get("munin:active_feeds") if not active_feeds: 
active_feeds = Feed.objects.filter(active_subscribers__gt=0).count() - MStatistics.set('munin:active_feeds', active_feeds, 60*60*12) + MStatistics.set("munin:active_feeds", active_feeds, 60 * 60 * 12) - push_feeds = MStatistics.get('munin:push_feeds') + push_feeds = MStatistics.get("munin:push_feeds") if not push_feeds: push_feeds = PushSubscription.objects.filter(verified=True).count() - MStatistics.set('munin:push_feeds', push_feeds, 60*60*12) + MStatistics.set("munin:push_feeds", push_feeds, 60 * 60 * 12) r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) - + return { - 'scheduled_feeds': r.zcard('scheduled_updates'), - 'exception_feeds': exception_feeds, - 'exception_pages': exception_pages, - 'duplicate_feeds': duplicate_feeds, - 'active_feeds': active_feeds, - 'push_feeds': push_feeds, + "scheduled_feeds": r.zcard("scheduled_updates"), + "exception_feeds": exception_feeds, + "exception_pages": exception_pages, + "duplicate_feeds": duplicate_feeds, + "active_feeds": active_feeds, + "push_feeds": push_feeds, } -if __name__ == '__main__': + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_feeds.py b/utils/munin/newsblur_feeds.py index 61857dbc85..c02918e6f8 100755 --- a/utils/munin/newsblur_feeds.py +++ b/utils/munin/newsblur_feeds.py @@ -1,23 +1,25 @@ #!/srv/newsblur/venv/newsblur3/bin/python from utils.munin.base import MuninGraph import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" import django + django.setup() -class NBMuninGraph(MuninGraph): +class NBMuninGraph(MuninGraph): @property def graph_config(self): return { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur Feeds & Subscriptions', - 'graph_vlabel' : 'Feeds & Subscribers', - 'graph_args' : '-l 0', - 'feeds.label': 'feeds', - 'subscriptions.label': 'subscriptions', - 'profiles.label': 'profiles', - 'social_subscriptions.label': 'social_subscriptions', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur Feeds & Subscriptions", + "graph_vlabel": "Feeds & Subscribers", + "graph_args": "-l 0", + "feeds.label": "feeds", + "subscriptions.label": "subscriptions", + "profiles.label": "profiles", + "social_subscriptions.label": "social_subscriptions", } def calculate_metrics(self): @@ -26,22 +28,23 @@ def calculate_metrics(self): from apps.social.models import MSocialProfile, MSocialSubscription from apps.statistics.models import MStatistics - feeds_count = MStatistics.get('munin:feeds_count') + feeds_count = MStatistics.get("munin:feeds_count") if not feeds_count: feeds_count = Feed.objects.all().count() - MStatistics.set('munin:feeds_count', feeds_count, 60*60*12) + MStatistics.set("munin:feeds_count", feeds_count, 60 * 60 * 12) - subscriptions_count = MStatistics.get('munin:subscriptions_count') + subscriptions_count = MStatistics.get("munin:subscriptions_count") if not subscriptions_count: subscriptions_count = UserSubscription.objects.all().count() - MStatistics.set('munin:subscriptions_count', subscriptions_count, 60*60*12) + MStatistics.set("munin:subscriptions_count", subscriptions_count, 60 * 60 * 12) return { - 'feeds': feeds_count, - 'subscriptions': subscriptions_count, - 'profiles': MSocialProfile.objects.count(), - 'social_subscriptions': MSocialSubscription.objects.count(), + "feeds": feeds_count, + "subscriptions": subscriptions_count, + "profiles": MSocialProfile.objects.count(), + "social_subscriptions": MSocialSubscription.objects.count(), } -if __name__ == '__main__': + +if __name__ == "__main__": NBMuninGraph().run() diff 
--git a/utils/munin/newsblur_loadtimes.py b/utils/munin/newsblur_loadtimes.py index 05a76eb32b..5d89be7854 100755 --- a/utils/munin/newsblur_loadtimes.py +++ b/utils/munin/newsblur_loadtimes.py @@ -1,30 +1,33 @@ #!/srv/newsblur/venv/newsblur3/bin/python from utils.munin.base import MuninGraph import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" import django + django.setup() -class NBMuninGraph(MuninGraph): +class NBMuninGraph(MuninGraph): @property def graph_config(self): return { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur Loadtimes', - 'graph_vlabel' : 'Loadtimes (seconds)', - 'graph_args' : '-l 0', - 'feed_loadtimes_avg_hour.label': 'Feed Loadtimes Avg (Hour)', - 'feeds_loaded_hour.label': 'Feeds Loaded (Hour)', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur Loadtimes", + "graph_vlabel": "Loadtimes (seconds)", + "graph_args": "-l 0", + "feed_loadtimes_avg_hour.label": "Feed Loadtimes Avg (Hour)", + "feeds_loaded_hour.label": "Feeds Loaded (Hour)", } def calculate_metrics(self): from apps.statistics.models import MStatistics - + return { - 'feed_loadtimes_avg_hour': MStatistics.get('latest_avg_time_taken'), - 'feeds_loaded_hour': MStatistics.get('latest_sites_loaded'), + "feed_loadtimes_avg_hour": MStatistics.get("latest_avg_time_taken"), + "feeds_loaded_hour": MStatistics.get("latest_sites_loaded"), } -if __name__ == '__main__': + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_stories.py b/utils/munin/newsblur_stories.py index 9b71664a0b..d94c35d2c8 100755 --- a/utils/munin/newsblur_stories.py +++ b/utils/munin/newsblur_stories.py @@ -1,31 +1,33 @@ #!/srv/newsblur/venv/newsblur3/bin/python from utils.munin.base import MuninGraph import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" import django + django.setup() class NBMuninGraph(MuninGraph): - @property def graph_config(self): return { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur Stories', - 'graph_vlabel' : 'Stories', - 'graph_args' : '-l 0', - 'stories.label': 'Stories', - 'starred_stories.label': 'Starred stories', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur Stories", + "graph_vlabel": "Stories", + "graph_args": "-l 0", + "stories.label": "Stories", + "starred_stories.label": "Starred stories", } def calculate_metrics(self): from apps.rss_feeds.models import MStory, MStarredStory return { - 'stories': MStory.objects.count(), - 'starred_stories': MStarredStory.objects.count(), + "stories": MStory.objects.count(), + "starred_stories": MStarredStory.objects.count(), } -if __name__ == '__main__': + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_tasks_codes.py b/utils/munin/newsblur_tasks_codes.py index 94106899d9..e613704aab 100755 --- a/utils/munin/newsblur_tasks_codes.py +++ b/utils/munin/newsblur_tasks_codes.py @@ -1,49 +1,55 @@ #!/srv/newsblur/venv/newsblur3/bin/python from utils.munin.base import MuninGraph import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" -class NBMuninGraph(MuninGraph): +class NBMuninGraph(MuninGraph): @property def graph_config(self): graph = { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur Task Codes', - 'graph_vlabel' : 'Status codes on feed fetch', - 'graph_args' : '-l 0', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur Task Codes", + "graph_vlabel": "Status codes on feed fetch", + "graph_args": "-l 0", } stats = self.stats - graph.update(dict((("_%s.label" % s['_id'], s['_id']) for s 
in stats))) - graph['graph_order'] = ' '.join(sorted(("_%s" % s['_id']) for s in stats)) + graph.update(dict((("_%s.label" % s["_id"], s["_id"]) for s in stats))) + graph["graph_order"] = " ".join(sorted(("_%s" % s["_id"]) for s in stats)) return graph def calculate_metrics(self): - servers = dict((("_%s" % s['_id'], s['feeds']) for s in self.stats)) - + servers = dict((("_%s" % s["_id"], s["feeds"]) for s in self.stats)) + return servers - + @property def stats(self): import datetime from django.conf import settings - - stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate([{ - "$match": { - "date": { - "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + + stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate( + [ + { + "$match": { + "date": { + "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, + }, + { + "$group": { + "_id": "$feed_code", + "feeds": {"$sum": 1}, + }, }, - }, - }, { - "$group": { - "_id" : "$feed_code", - "feeds" : {"$sum": 1}, - }, - }]) - + ] + ) + return list(stats) - -if __name__ == '__main__': + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_tasks_pipeline.py b/utils/munin/newsblur_tasks_pipeline.py index 430918f739..f857452d72 100755 --- a/utils/munin/newsblur_tasks_pipeline.py +++ b/utils/munin/newsblur_tasks_pipeline.py @@ -1,54 +1,61 @@ #!/srv/newsblur/venv/newsblur3/bin/python from utils.munin.base import MuninGraph import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" import django + django.setup() -class NBMuninGraph(MuninGraph): +class NBMuninGraph(MuninGraph): @property def graph_config(self): graph = { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur Task Pipeline', - 'graph_vlabel' : 'Feed fetch pipeline times', - 'graph_args' : '-l 0', - 'feed_fetch.label': 'feed_fetch', - 'feed_process.label': 'feed_process', - 'page.label': 'page', - 'icon.label': 'icon', - 'total.label': 'total', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur Task Pipeline", + "graph_vlabel": "Feed fetch pipeline times", + "graph_args": "-l 0", + "feed_fetch.label": "feed_fetch", + "feed_process.label": "feed_process", + "page.label": "page", + "icon.label": "icon", + "total.label": "total", } return graph def calculate_metrics(self): return self.stats - + @property def stats(self): import datetime from django.conf import settings - - stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate([{ - "$match": { - "date": { - "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + + stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate( + [ + { + "$match": { + "date": { + "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, + }, + { + "$group": { + "_id": 1, + "feed_fetch": {"$avg": "$feed_fetch"}, + "feed_process": {"$avg": "$feed_process"}, + "page": {"$avg": "$page"}, + "icon": {"$avg": "$icon"}, + "total": {"$avg": "$total"}, + }, }, - }, - }, { - "$group": { - "_id": 1, - "feed_fetch": {"$avg": "$feed_fetch"}, - "feed_process": {"$avg": "$feed_process"}, - "page": {"$avg": "$page"}, - "icon": {"$avg": "$icon"}, - "total": {"$avg": "$total"}, - }, - }]) - + ] + ) + return list(stats)[0] - -if __name__ == '__main__': + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_tasks_servers.py b/utils/munin/newsblur_tasks_servers.py index 3b1286a02b..e76b920dc7 100755 --- a/utils/munin/newsblur_tasks_servers.py +++ b/utils/munin/newsblur_tasks_servers.py @@ 
-2,70 +2,80 @@ from utils.munin.base import MuninGraph import datetime import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" from django.conf import settings class NBMuninGraph(MuninGraph): - @property def graph_config(self): graph = { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur Task Server Fetches', - 'graph_vlabel' : '# of fetches / server', - 'graph_args' : '-l 0', - 'total.label' : 'total', - 'total.draw' : 'LINE1', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur Task Server Fetches", + "graph_vlabel": "# of fetches / server", + "graph_args": "-l 0", + "total.label": "total", + "total.draw": "LINE1", } stats = self.stats - graph.update(dict((("%s.label" % s['_id'].replace('-', ''), s['_id']) for s in stats))) - graph.update(dict((("%s.draw" % s['_id'].replace('-', ''), "AREASTACK") for s in stats))) - graph['graph_order'] = ' '.join(sorted(s['_id'].replace('-', '') for s in stats)) + graph.update(dict((("%s.label" % s["_id"].replace("-", ""), s["_id"]) for s in stats))) + graph.update(dict((("%s.draw" % s["_id"].replace("-", ""), "AREASTACK") for s in stats))) + graph["graph_order"] = " ".join(sorted(s["_id"].replace("-", "") for s in stats)) return graph def calculate_metrics(self): - servers = dict((("%s" % s['_id'].replace('-', ''), s['feeds']) for s in self.stats)) - servers['total'] = self.total[0]['feeds'] + servers = dict((("%s" % s["_id"].replace("-", ""), s["feeds"]) for s in self.stats)) + servers["total"] = self.total[0]["feeds"] return servers - + @property def stats(self): - stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate([{ - "$match": { - "date": { - "$gte": datetime.datetime.now() - datetime.timedelta(minutes=5), + stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate( + [ + { + "$match": { + "date": { + "$gte": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, }, - }, - }, { - "$group": { - "_id" : "$server", - "feeds" : {"$sum": 1}, - }, - }]) - + { + "$group": { + "_id": "$server", + "feeds": {"$sum": 1}, + }, + }, + ] + ) + return list(stats) - + @property def total(self): import datetime from django.conf import settings - - stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate([{ - "$match": { - "date": { - "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + + stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate( + [ + { + "$match": { + "date": { + "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, + }, + { + "$group": { + "_id": 1, + "feeds": {"$sum": 1}, + }, }, - }, - }, { - "$group": { - "_id" : 1, - "feeds" : {"$sum": 1}, - }, - }]) - + ] + ) + return list(stats) - -if __name__ == '__main__': + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_tasks_times.py b/utils/munin/newsblur_tasks_times.py index 4a650a4674..e83d29ff88 100755 --- a/utils/munin/newsblur_tasks_times.py +++ b/utils/munin/newsblur_tasks_times.py @@ -1,53 +1,60 @@ #!/srv/newsblur/venv/newsblur3/bin/python from utils.munin.base import MuninGraph import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" import django + django.setup() -class NBMuninGraph(MuninGraph): +class NBMuninGraph(MuninGraph): @property def graph_config(self): graph = { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur Task Server Times', - 'graph_vlabel' : 'Feed fetch time / server', - 'graph_args' : '-l 0', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur Task Server Times", + 
"graph_vlabel": "Feed fetch time / server", + "graph_args": "-l 0", } stats = self.stats - graph.update(dict((("%s.label" % s['_id'].replace('-', ''), s['_id']) for s in stats))) - graph.update(dict((("%s.draw" % s['_id'].replace('-', ''), 'LINE1') for s in stats))) - graph['graph_order'] = ' '.join(sorted(s['_id'].replace('-', '') for s in stats)) + graph.update(dict((("%s.label" % s["_id"].replace("-", ""), s["_id"]) for s in stats))) + graph.update(dict((("%s.draw" % s["_id"].replace("-", ""), "LINE1") for s in stats))) + graph["graph_order"] = " ".join(sorted(s["_id"].replace("-", "") for s in stats)) return graph def calculate_metrics(self): - servers = dict((("%s" % s['_id'].replace('-', ''), s['total']) for s in self.stats)) + servers = dict((("%s" % s["_id"].replace("-", ""), s["total"]) for s in self.stats)) return servers - + @property def stats(self): import datetime from django.conf import settings - - stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate([{ - "$match": { - "date": { - "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + + stats = settings.MONGOANALYTICSDB.nbanalytics.feed_fetches.aggregate( + [ + { + "$match": { + "date": { + "$gt": datetime.datetime.now() - datetime.timedelta(minutes=5), + }, + }, + }, + { + "$group": { + "_id": "$server", + "total": {"$avg": "$total"}, + }, }, - }, - }, { - "$group": { - "_id" : "$server", - "total" : {"$avg": "$total"}, - }, - }]) - + ] + ) + return list(stats) - -if __name__ == '__main__': + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_updates.py b/utils/munin/newsblur_updates.py index 31e0b86e96..4be495c4f7 100755 --- a/utils/munin/newsblur_updates.py +++ b/utils/munin/newsblur_updates.py @@ -2,47 +2,49 @@ import redis from utils.munin.base import MuninGraph import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" import django + django.setup() -class NBMuninGraph(MuninGraph): +class NBMuninGraph(MuninGraph): @property def graph_config(self): return { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur Updates', - 'graph_vlabel' : '# of updates', - 'graph_args' : '-l 0', - 'update_queue.label': 'Queued Feeds', - 'feeds_fetched.label': 'Fetched feeds last hour', - 'tasked_feeds.label': 'Tasked Feeds', - 'error_feeds.label': 'Error Feeds', - 'celery_update_feeds.label': 'Celery - Update Feeds', - 'celery_new_feeds.label': 'Celery - New Feeds', - 'celery_push_feeds.label': 'Celery - Push Feeds', - 'celery_work_queue.label': 'Celery - Work Queue', - 'celery_search_queue.label': 'Celery - Search Queue', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur Updates", + "graph_vlabel": "# of updates", + "graph_args": "-l 0", + "update_queue.label": "Queued Feeds", + "feeds_fetched.label": "Fetched feeds last hour", + "tasked_feeds.label": "Tasked Feeds", + "error_feeds.label": "Error Feeds", + "celery_update_feeds.label": "Celery - Update Feeds", + "celery_new_feeds.label": "Celery - New Feeds", + "celery_push_feeds.label": "Celery - Push Feeds", + "celery_work_queue.label": "Celery - Work Queue", + "celery_search_queue.label": "Celery - Search Queue", } - def calculate_metrics(self): from django.conf import settings - + r = redis.Redis(connection_pool=settings.REDIS_FEED_UPDATE_POOL) return { - 'update_queue': r.scard("queued_feeds"), - 'feeds_fetched': r.zcard("fetched_feeds_last_hour"), - 'tasked_feeds': r.zcard("tasked_feeds"), - 'error_feeds': r.zcard("error_feeds"), - 'celery_update_feeds': r.llen("update_feeds"), - 
'celery_new_feeds': r.llen("new_feeds"), - 'celery_push_feeds': r.llen("push_feeds"), - 'celery_work_queue': r.llen("work_queue"), - 'celery_search_queue': r.llen("search_indexer"), + "update_queue": r.scard("queued_feeds"), + "feeds_fetched": r.zcard("fetched_feeds_last_hour"), + "tasked_feeds": r.zcard("tasked_feeds"), + "error_feeds": r.zcard("error_feeds"), + "celery_update_feeds": r.llen("update_feeds"), + "celery_new_feeds": r.llen("new_feeds"), + "celery_push_feeds": r.llen("push_feeds"), + "celery_work_queue": r.llen("work_queue"), + "celery_search_queue": r.llen("search_indexer"), } -if __name__ == '__main__': + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/munin/newsblur_users.py b/utils/munin/newsblur_users.py index a2083ea953..5725dcfccf 100755 --- a/utils/munin/newsblur_users.py +++ b/utils/munin/newsblur_users.py @@ -1,24 +1,26 @@ #!/srv/newsblur/venv/newsblur3/bin/python from utils.munin.base import MuninGraph import os + os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" import django + django.setup() -class NBMuninGraph(MuninGraph): +class NBMuninGraph(MuninGraph): @property def graph_config(self): return { - 'graph_category' : 'NewsBlur', - 'graph_title' : 'NewsBlur Users', - 'graph_vlabel' : 'users', - 'graph_args' : '-l 0', - 'all.label': 'all', - 'monthly.label': 'monthly', - 'daily.label': 'daily', - 'premium.label': 'premium', - 'queued.label': 'queued', + "graph_category": "NewsBlur", + "graph_title": "NewsBlur Users", + "graph_vlabel": "users", + "graph_args": "-l 0", + "all.label": "all", + "monthly.label": "monthly", + "daily.label": "daily", + "premium.label": "premium", + "queued.label": "queued", } def calculate_metrics(self): @@ -27,15 +29,16 @@ def calculate_metrics(self): from apps.profile.models import Profile, RNewUserQueue last_month = datetime.datetime.utcnow() - datetime.timedelta(days=30) - last_day = datetime.datetime.utcnow() - datetime.timedelta(minutes=60*24) + last_day = datetime.datetime.utcnow() - datetime.timedelta(minutes=60 * 24) return { - 'all': User.objects.count(), - 'monthly': Profile.objects.filter(last_seen_on__gte=last_month).count(), - 'daily': Profile.objects.filter(last_seen_on__gte=last_day).count(), - 'premium': Profile.objects.filter(is_premium=True).count(), - 'queued': RNewUserQueue.user_count(), + "all": User.objects.count(), + "monthly": Profile.objects.filter(last_seen_on__gte=last_month).count(), + "daily": Profile.objects.filter(last_seen_on__gte=last_day).count(), + "premium": Profile.objects.filter(is_premium=True).count(), + "queued": RNewUserQueue.user_count(), } -if __name__ == '__main__': + +if __name__ == "__main__": NBMuninGraph().run() diff --git a/utils/pipeline_utils.py b/utils/pipeline_utils.py index 51cbf85ff9..a73f5c4bf4 100644 --- a/utils/pipeline_utils.py +++ b/utils/pipeline_utils.py @@ -5,22 +5,25 @@ from pipeline.storage import GZIPMixin from pipeline.storage import PipelineManifestStorage + class PipelineStorage(PipelineManifestStorage): def url(self, *args, **kwargs): if settings.DEBUG_ASSETS: # print(f"Pre-Pipeline storage: {args} {kwargs}") - kwargs['name'] = re.sub(r'\.[a-f0-9]{12}\.(css|js)$', r'.\1', args[0]) + kwargs["name"] = re.sub(r"\.[a-f0-9]{12}\.(css|js)$", r".\1", args[0]) args = args[1:] url = super().url(*args, **kwargs) if settings.DEBUG_ASSETS: url = url.replace(settings.STATIC_URL, settings.MEDIA_URL) - url = re.sub(r'\.[a-f0-9]{12}\.(css|js)$', r'.\1', url) + url = re.sub(r"\.[a-f0-9]{12}\.(css|js)$", r".\1", url) # print(f"Pipeline storage: {args} 
{kwargs} {url}") return url + class GzipPipelineStorage(GZIPMixin, PipelineManifestStorage): pass + class AppDirectoriesFinder(PipelineAppDirectoriesFinder): """ Like AppDirectoriesFinder, but doesn't return any additional ignored patterns @@ -28,36 +31,38 @@ class AppDirectoriesFinder(PipelineAppDirectoriesFinder): This allows us to concentrate/compress our components without dragging the raw versions in too. """ + ignore_patterns = [ # '*.js', # '*.css', - '*.less', - '*.scss', - '*.styl', - '*.sh', - '*.html', - '*.ttf', - '*.md', - '*.markdown', - '*.php', - '*.txt', + "*.less", + "*.scss", + "*.styl", + "*.sh", + "*.html", + "*.ttf", + "*.md", + "*.markdown", + "*.php", + "*.txt", # '*.gif', # due to django_extensions/css/jquery.autocomplete.css: django_extensions/img/indicator.gif - '*.png', - '*.jpg', + "*.png", + "*.jpg", # '*.svg', # due to admin/css/base.css: admin/img/sorting-icons.svg - '*.ico', - '*.icns', - '*.psd', - '*.ai', - '*.sketch', - '*.emf', - '*.eps', - '*.pdf', - '*.xml', - '*LICENSE*', - '*README*', + "*.ico", + "*.icns", + "*.psd", + "*.ai", + "*.sketch", + "*.emf", + "*.eps", + "*.pdf", + "*.xml", + "*LICENSE*", + "*README*", ] - + + class FileSystemFinder(PipelineFileSystemFinder): """ Like FileSystemFinder, but doesn't return any additional ignored patterns @@ -65,48 +70,48 @@ class FileSystemFinder(PipelineFileSystemFinder): This allows us to concentrate/compress our components without dragging the raw versions in too. """ + ignore_patterns = [ # '*.js', # '*.css', # '*.less', # '*.scss', # '*.styl', - '*.sh', - '*.html', - '*.ttf', - '*.md', - '*.markdown', - '*.php', - '*.txt', - '*.gif', - '*.png', - '*.jpg', - '*media/**/*.svg', - '*.ico', - '*.icns', - '*.psd', - '*.ai', - '*.sketch', - '*.emf', - '*.eps', - '*.pdf', - '*.xml', - '*embed*', - 'blog*', + "*.sh", + "*.html", + "*.ttf", + "*.md", + "*.markdown", + "*.php", + "*.txt", + "*.gif", + "*.png", + "*.jpg", + "*media/**/*.svg", + "*.ico", + "*.icns", + "*.psd", + "*.ai", + "*.sketch", + "*.emf", + "*.eps", + "*.pdf", + "*.xml", + "*embed*", + "blog*", # # '*bookmarklet*', # # '*circular*', # # '*embed*', - '*css/mobile*', - '*extensions*', - 'fonts/*/*.css', - '*flash*', + "*css/mobile*", + "*extensions*", + "fonts/*/*.css", + "*flash*", # '*jquery-ui*', # 'mobile*', - '*safari*', + "*safari*", # # '*social*', # # '*vendor*', # 'Makefile*', # 'Gemfile*', - 'node_modules', + "node_modules", ] - \ No newline at end of file diff --git a/utils/ratelimit.py b/utils/ratelimit.py index 04e0aeaf96..9e82933249 100644 --- a/utils/ratelimit.py +++ b/utils/ratelimit.py @@ -8,39 +8,40 @@ class ratelimit(object): "Instances of this class can be used as decorators" # This class is designed to be sub-classed - minutes = 1 # The time period - requests = 4 # Number of allowed requests in that time period - - prefix = 'rl-' # Prefix for memcache key - + minutes = 1 # The time period + requests = 4 # Number of allowed requests in that time period + + prefix = "rl-" # Prefix for memcache key + def __init__(self, **options): for key, value in options.items(): setattr(self, key, value) - + def __call__(self, fn): def wrapper(request, *args, **kwargs): return self.view_wrapper(request, fn, *args, **kwargs) + functools.update_wrapper(wrapper, fn) return wrapper - + def view_wrapper(self, request, fn, *args, **kwargs): if not self.should_ratelimit(request): return fn(request, *args, **kwargs) - + counts = list(self.get_counters(request).values()) - + # Increment rate limiting counter 
self.cache_incr(self.current_key(request)) - + # Have they failed? if sum(counts) >= self.requests: return self.disallowed(request) - + return fn(request, *args, **kwargs) - + def cache_get_many(self, keys): return cache.get_many(keys) - + def cache_incr(self, key): # memcache is only backend that can increment atomically try: @@ -49,59 +50,53 @@ def cache_incr(self, key): cache.incr(key) except (AttributeError, ValueError): cache.set(key, cache.get(key, 0) + 1, self.expire_after()) - + def should_ratelimit(self, request): return True - + def get_counters(self, request): return self.cache_get_many(self.keys_to_check(request)) - + def keys_to_check(self, request): extra = self.key_extra(request) now = datetime.now() return [ - '%s%s-%s' % ( - self.prefix, - extra, - (now - timedelta(minutes = minute)).strftime('%Y%m%d%H%M') - ) for minute in range(self.minutes + 1) + "%s%s-%s" % (self.prefix, extra, (now - timedelta(minutes=minute)).strftime("%Y%m%d%H%M")) + for minute in range(self.minutes + 1) ] - + def current_key(self, request): - return '%s%s-%s' % ( - self.prefix, - self.key_extra(request), - datetime.now().strftime('%Y%m%d%H%M') - ) - + return "%s%s-%s" % (self.prefix, self.key_extra(request), datetime.now().strftime("%Y%m%d%H%M")) + def key_extra(self, request): - key = getattr(request.session, 'session_key', '') + key = getattr(request.session, "session_key", "") if not key: - key = request.META.get('HTTP_X_FORWARDED_FOR', '').split(',')[0] + key = request.META.get("HTTP_X_FORWARDED_FOR", "").split(",")[0] if not key: - key = request.COOKIES.get('newsblur_sessionid', '') + key = request.COOKIES.get("newsblur_sessionid", "") if not key: - key = request.META.get('HTTP_USER_AGENT', '') + key = request.META.get("HTTP_USER_AGENT", "") return key - + def disallowed(self, request): - return HttpResponse('Rate limit exceeded', status=429) - + return HttpResponse("Rate limit exceeded", status=429) + def expire_after(self): "Used for setting the memcached cache expiry" return (self.minutes + 1) * 60 + class ratelimit_post(ratelimit): "Rate limit POSTs - can be used to protect a login form" - key_field = None # If provided, this POST var will affect the rate limit - + key_field = None # If provided, this POST var will affect the rate limit + def should_ratelimit(self, request): - return request.method == 'POST' - + return request.method == "POST" + def key_extra(self, request): # IP address and key_field (if it is set) extra = super(ratelimit_post, self).key_extra(request) if self.key_field: - value = hashlib.sha1((request.POST.get(self.key_field, '')).encode('utf-8')).hexdigest() - extra += '-' + value + value = hashlib.sha1((request.POST.get(self.key_field, "")).encode("utf-8")).hexdigest() + extra += "-" + value return extra diff --git a/utils/redis_raw_log_middleware.py b/utils/redis_raw_log_middleware.py index c1049c7ae8..6e11bc13e1 100644 --- a/utils/redis_raw_log_middleware.py +++ b/utils/redis_raw_log_middleware.py @@ -6,36 +6,34 @@ from time import time from pprint import pprint -class RedisDumpMiddleware(object): +class RedisDumpMiddleware(object): def __init__(self, get_response=None): self.get_response = get_response def activated(self, request): - return (settings.DEBUG_QUERIES or - (hasattr(request, 'activated_segments') and - 'db_profiler' in request.activated_segments)) + return settings.DEBUG_QUERIES or ( + hasattr(request, "activated_segments") and "db_profiler" in request.activated_segments + ) def process_view(self, request, callback, callback_args, callback_kwargs): - if 
not self.activated(request): return - if not getattr(Connection, '_logging', False): + if not self.activated(request): + return + if not getattr(Connection, "_logging", False): # save old methods - setattr(Connection, '_logging', True) + setattr(Connection, "_logging", True) connection.queriesx = [] - Redis.execute_command = \ - self._instrument(Redis.execute_command) - Pipeline._execute_transaction = \ - self._instrument_pipeline(Pipeline._execute_transaction) + Redis.execute_command = self._instrument(Redis.execute_command) + Pipeline._execute_transaction = self._instrument_pipeline(Pipeline._execute_transaction) def process_celery(self, profiler): - if not self.activated(profiler): return - if not getattr(Connection, '_logging', False): + if not self.activated(profiler): + return + if not getattr(Connection, "_logging", False): # save old methods - setattr(Connection, '_logging', True) - Redis.execute_command = \ - self._instrument(Redis.execute_command) - Pipeline._execute_transaction = \ - self._instrument_pipeline(Pipeline._execute_transaction) + setattr(Connection, "_logging", True) + Redis.execute_command = self._instrument(Redis.execute_command) + Pipeline._execute_transaction = self._instrument_pipeline(Pipeline._execute_transaction) def process_response(self, request, response): # if settings.DEBUG and hasattr(self, 'orig_pack_command'): @@ -54,13 +52,16 @@ def instrumented_method(*args, **kwargs): result = original_method(*args, **kwargs) stop = time() duration = stop - start - if not getattr(connection, 'queriesx', False): + if not getattr(connection, "queriesx", False): connection.queriesx = [] - connection.queriesx.append({ - message['redis_server_name']: message, - 'time': '%.6f' % duration, - }) + connection.queriesx.append( + { + message["redis_server_name"]: message, + "time": "%.6f" % duration, + } + ) return result + return instrumented_method def _instrument_pipeline(self, original_method): @@ -72,38 +73,41 @@ def instrumented_method(*args, **kwargs): result = original_method(*args, **kwargs) stop = time() duration = stop - start - if not getattr(connection, 'queriesx', False): + if not getattr(connection, "queriesx", False): connection.queriesx = [] - connection.queriesx.append({ - message['redis_server_name']: message, - 'time': '%.6f' % duration, - }) + connection.queriesx.append( + { + message["redis_server_name"]: message, + "time": "%.6f" % duration, + } + ) return result + return instrumented_method - + def process_message(self, *args, **kwargs): query = [] redis_server_name = None for a, arg in enumerate(args): if isinstance(arg, Redis): redis_connection = arg - redis_server_name = redis_connection.connection_pool.connection_kwargs['host'] - if 'db-redis-user' in redis_server_name: - redis_server_name = 'redis_user' - elif 'db-redis-session' in redis_server_name: - redis_server_name = 'redis_session' - elif 'db-redis-story' in redis_server_name: - redis_server_name = 'redis_story' - elif 'db-redis-pubsub' in redis_server_name: - redis_server_name = 'redis_pubsub' - elif 'db_redis' in redis_server_name: - redis_server_name = 'redis_user' + redis_server_name = redis_connection.connection_pool.connection_kwargs["host"] + if "db-redis-user" in redis_server_name: + redis_server_name = "redis_user" + elif "db-redis-session" in redis_server_name: + redis_server_name = "redis_session" + elif "db-redis-story" in redis_server_name: + redis_server_name = "redis_story" + elif "db-redis-pubsub" in redis_server_name: + redis_server_name = "redis_pubsub" + elif "db_redis" in 
redis_server_name: + redis_server_name = "redis_user" continue if len(str(arg)) > 100: arg = "[%s bytes]" % len(str(arg)) - query.append(str(arg).replace('\n', '')) - return { 'query': f"{redis_server_name}: {' '.join(query)}", 'redis_server_name': redis_server_name } - + query.append(str(arg).replace("\n", "")) + return {"query": f"{redis_server_name}: {' '.join(query)}", "redis_server_name": redis_server_name} + def process_pipeline(self, *args, **kwargs): queries = [] redis_server_name = None @@ -112,17 +116,17 @@ def process_pipeline(self, *args, **kwargs): continue if isinstance(arg, Pipeline): redis_connection = arg - redis_server_name = redis_connection.connection_pool.connection_kwargs['host'] - if 'db-redis-user' in redis_server_name: - redis_server_name = 'redis_user' - elif 'db-redis-session' in redis_server_name: - redis_server_name = 'redis_session' - elif 'db-redis-story' in redis_server_name: - redis_server_name = 'redis_story' - elif 'db-redis-pubsub' in redis_server_name: - redis_server_name = 'redis_pubsub' - elif 'db_redis' in redis_server_name: - redis_server_name = 'redis_user' + redis_server_name = redis_connection.connection_pool.connection_kwargs["host"] + if "db-redis-user" in redis_server_name: + redis_server_name = "redis_user" + elif "db-redis-session" in redis_server_name: + redis_server_name = "redis_session" + elif "db-redis-story" in redis_server_name: + redis_server_name = "redis_story" + elif "db-redis-pubsub" in redis_server_name: + redis_server_name = "redis_pubsub" + elif "db_redis" in redis_server_name: + redis_server_name = "redis_user" continue if not isinstance(arg, list): continue @@ -132,16 +136,16 @@ def process_pipeline(self, *args, **kwargs): if len(str(arg)) > 10000: arg = "[%s bytes]" % len(str(arg)) # query.append(str(arg).replace('\n', '')) - queries_str = '\n\t\t\t\t\t\t~FC'.join(queries) - return { 'query': f"{redis_server_name}: {queries_str}", 'redis_server_name': redis_server_name } + queries_str = "\n\t\t\t\t\t\t~FC".join(queries) + return {"query": f"{redis_server_name}: {queries_str}", "redis_server_name": redis_server_name} def __call__(self, request): response = None - if hasattr(self, 'process_request'): + if hasattr(self, "process_request"): response = self.process_request(request) if not response: response = self.get_response(request) - if hasattr(self, 'process_response'): + if hasattr(self, "process_response"): response = self.process_response(request, response) return response diff --git a/utils/request_introspection_middleware.py b/utils/request_introspection_middleware.py index 7ec0bdf032..21e8a594ec 100644 --- a/utils/request_introspection_middleware.py +++ b/utils/request_introspection_middleware.py @@ -12,6 +12,7 @@ RECORD_SLOW_REQUESTS_ABOVE_SECONDS = 10 + class DumpRequestMiddleware: def process_request(self, request): if settings.DEBUG and request.path not in IGNORE_PATHS: @@ -20,33 +21,49 @@ def process_request(self, request): if request_items: request_items_str = f"{dict(request_items)}" if len(request_items_str) > 500: - request_items_str = request_items_str[:100] + "...[" + str(len(request_items_str)-200) + " bytes]..." + request_items_str[-100:] - logging.debug(" ---> ~FC%s ~SN~FK~BC%s~BT~ST ~FC%s~BK~FC" % (request.method, request.path, request_items_str)) + request_items_str = ( + request_items_str[:100] + + "...[" + + str(len(request_items_str) - 200) + + " bytes]..." 
+ request_items_str[-100:] +                ) +                logging.debug( +                    " ---> ~FC%s ~SN~FK~BC%s~BT~ST ~FC%s~BK~FC" +                    % (request.method, request.path, request_items_str) +                ) else: logging.debug(" ---> ~FC%s ~SN~FK~BC%s~BT~ST" % (request.method, request.path)) def process_response(self, request, response): -        if hasattr(request, 'sql_times_elapsed'): -            redis_log = "~FCuser:%s%.6f~SNs ~FCstory:%s%.6f~SNs ~FCsession:%s%.6f~SNs ~FCpubsub:%s%.6f~SNs" % ( -                self.color_db(request.sql_times_elapsed['redis_user'], '~FC'), -                request.sql_times_elapsed['redis_user'], -                self.color_db(request.sql_times_elapsed['redis_story'], '~FC'), -                request.sql_times_elapsed['redis_story'], -                self.color_db(request.sql_times_elapsed['redis_session'], '~FC'), -                request.sql_times_elapsed['redis_session'], -                self.color_db(request.sql_times_elapsed['redis_pubsub'], '~FC'), -                request.sql_times_elapsed['redis_pubsub'], +        if hasattr(request, "sql_times_elapsed"): +            redis_log = ( +                "~FCuser:%s%.6f~SNs ~FCstory:%s%.6f~SNs ~FCsession:%s%.6f~SNs ~FCpubsub:%s%.6f~SNs" +                % ( +                    self.color_db(request.sql_times_elapsed["redis_user"], "~FC"), +                    request.sql_times_elapsed["redis_user"], +                    self.color_db(request.sql_times_elapsed["redis_story"], "~FC"), +                    request.sql_times_elapsed["redis_story"], +                    self.color_db(request.sql_times_elapsed["redis_session"], "~FC"), +                    request.sql_times_elapsed["redis_session"], +                    self.color_db(request.sql_times_elapsed["redis_pubsub"], "~FC"), +                    request.sql_times_elapsed["redis_pubsub"], +                ) +            ) +            logging.user( +                request, +                "~SN~FCDB times ~SB~FK%s~SN~FC: ~FYsql: %s%.4f~SNs ~SN~FMmongo: %s%.5f~SNs ~SN~FCredis: %s" +                % ( +                    request.path, +                    self.color_db(request.sql_times_elapsed["sql"], "~FY"), +                    request.sql_times_elapsed["sql"], +                    self.color_db(request.sql_times_elapsed["mongo"], "~FM"), +                    request.sql_times_elapsed["mongo"], +                    redis_log, +                ), ) -            logging.user(request, "~SN~FCDB times ~SB~FK%s~SN~FC: ~FYsql: %s%.4f~SNs ~SN~FMmongo: %s%.5f~SNs ~SN~FCredis: %s" % ( -                request.path, -                self.color_db(request.sql_times_elapsed['sql'], '~FY'), -                request.sql_times_elapsed['sql'], -                self.color_db(request.sql_times_elapsed['mongo'], '~FM'), -                request.sql_times_elapsed['mongo'], -                redis_log -            )) -        if hasattr(request, 'start_time'): +        if hasattr(request, "start_time"): seconds = time.time() - request.start_time if seconds > RECORD_SLOW_REQUESTS_ABOVE_SECONDS: r = redis.Redis(connection_pool=settings.REDIS_STATISTICS_POOL) @@ -56,9 +73,9 @@ def process_response(self, request, response): user_id = request.user.pk if request.user.is_authenticated else "0" data_string = None if request.method == "GET": -            data_string = ' '.join([f"{key}={value}" for key, value in request.GET.items()]) +            data_string = " ".join([f"{key}={value}" for key, value in request.GET.items()]) elif request.method == "POST": -            data_string = ' '.join([f"{key}={value}" for key, value in request.POST.items()]) +            data_string = " ".join([f"{key}={value}" for key, value in request.POST.items()]) data = { "user_id": user_id, "time": round(seconds, 2), @@ -66,18 +83,18 @@ def process_response(self, request, response): "method": request.method, "data": data_string, } -        pipe.lpush(name, base64.b64encode(pickle.dumps(data)).decode('utf-8')) -        pipe.expire(name, 60*60*12) # 12 hours +        pipe.lpush(name, base64.b64encode(pickle.dumps(data)).decode("utf-8")) +        pipe.expire(name, 60 * 60 * 12)  # 12 hours pipe.execute() -     return response -     def color_db(self, seconds, default): color = default -        if seconds >= .25: -            color = '~SB~FR' -        elif seconds > .1: -            color = '~FW' +        if seconds >= 0.25: +
color = "~SB~FR" + elif seconds > 0.1: + color = "~FW" # elif seconds == 0: # color = '~FK~SB' return color @@ -87,11 +104,11 @@ def __init__(self, get_response=None): def __call__(self, request): response = None - if hasattr(self, 'process_request'): + if hasattr(self, "process_request"): response = self.process_request(request) if not response: response = self.get_response(request) - if hasattr(self, 'process_response'): + if hasattr(self, "process_response"): response = self.process_response(request, response) return response diff --git a/utils/rtail.py b/utils/rtail.py index c2f5854b1a..07906ce2d8 100755 --- a/utils/rtail.py +++ b/utils/rtail.py @@ -24,7 +24,9 @@ def main(): # this is a remote location hostname, path = arg.split(":", 1) if options.identity: - s = subprocess.Popen(["ssh", "-i", options.identity, hostname, "tail -f " + path], stdout=subprocess.PIPE) + s = subprocess.Popen( + ["ssh", "-i", options.identity, hostname, "tail -f " + path], stdout=subprocess.PIPE + ) else: s = subprocess.Popen(["ssh", hostname, "tail -f " + path], stdout=subprocess.PIPE) s.name = arg @@ -36,8 +38,7 @@ def main(): try: while True: - r, _, _ = select.select( - [stream.stdout.fileno() for stream in streams], [], []) + r, _, _ = select.select([stream.stdout.fileno() for stream in streams], [], []) for fileno in r: for stream in streams: if stream.stdout.fileno() != fileno: @@ -46,12 +47,13 @@ def main(): if not data: streams.remove(stream) break - host = re.match(r'^(.*?)\.', stream.name) + host = re.match(r"^(.*?)\.", stream.name) combination_message = "[%-6s] %s" % (host.group(1)[:6], data) sys.stdout.write(combination_message) break except KeyboardInterrupt: print(" --- End of Logging ---") + if __name__ == "__main__": main() diff --git a/utils/s3_utils.py b/utils/s3_utils.py index d045a37210..f71ec07a77 100644 --- a/utils/s3_utils.py +++ b/utils/s3_utils.py @@ -4,19 +4,18 @@ import mimetypes from utils.image_functions import ImageOps -if '/srv/newsblur' not in ' '.join(sys.path): +if "/srv/newsblur" not in " ".join(sys.path): sys.path.append("/srv/newsblur") -os.environ['DJANGO_SETTINGS_MODULE'] = 'newsblur_web.settings' +os.environ["DJANGO_SETTINGS_MODULE"] = "newsblur_web.settings" from django.conf import settings -ACCESS_KEY = settings.S3_ACCESS_KEY -SECRET = settings.S3_SECRET +ACCESS_KEY = settings.S3_ACCESS_KEY +SECRET = settings.S3_SECRET BUCKET_NAME = settings.S3_BACKUP_BUCKET # Note that you need to create this bucket first class S3Store: - def __init__(self, bucket_name=settings.S3_AVATARS_BUCKET_NAME): # if settings.DEBUG: # import ssl @@ -31,51 +30,47 @@ def __init__(self, bucket_name=settings.S3_AVATARS_BUCKET_NAME): # ssl._create_default_https_context = _create_unverified_https_context self.bucket_name = bucket_name self.s3 = settings.S3_CONN - + def create_bucket(self, bucket_name): return self.s3.create_bucket(Bucket=bucket_name) - + def save_profile_picture(self, user_id, filename, image_body): content_type, extension = self._extract_content_type(filename) if not content_type or not extension: return - - image_name = 'profile_%s.%s' % (int(time.time()), extension) - - image = ImageOps.resize_image(image_body, 'fullsize', fit_to_size=False) + + image_name = "profile_%s.%s" % (int(time.time()), extension) + + image = ImageOps.resize_image(image_body, "fullsize", fit_to_size=False) if image: - key = 'avatars/%s/large_%s' % (user_id, image_name) + key = "avatars/%s/large_%s" % (user_id, image_name) self._save_object(key, image, content_type=content_type) - image = 
ImageOps.resize_image(image_body, 'thumbnail', fit_to_size=True) + image = ImageOps.resize_image(image_body, "thumbnail", fit_to_size=True) if image: - key = 'avatars/%s/thumbnail_%s' % (user_id, image_name) + key = "avatars/%s/thumbnail_%s" % (user_id, image_name) self._save_object(key, image, content_type=content_type) - + return image and image_name def _extract_content_type(self, filename): content_type = mimetypes.guess_type(filename)[0] extension = None - - if content_type == 'image/jpeg': - extension = 'jpg' - elif content_type == 'image/png': - extension = 'png' - elif content_type == 'image/gif': - extension = 'gif' - + + if content_type == "image/jpeg": + extension = "jpg" + elif content_type == "image/png": + extension = "png" + elif content_type == "image/gif": + extension = "gif" + return content_type, extension - + def _save_object(self, key, file_object, content_type=None): file_object.seek(0) s3_object = self.s3.Object(bucket_name=self.bucket_name, key=key) if content_type: - s3_object.put(Body=file_object, - ContentType=content_type, - ACL='public-read' - ) + s3_object.put(Body=file_object, ContentType=content_type, ACL="public-read") else: s3_object.put(Body=file_object) - diff --git a/utils/scrubber/__init__.py b/utils/scrubber/__init__.py index 7d38d82a12..f39114286b 100755 --- a/utils/scrubber/__init__.py +++ b/utils/scrubber/__init__.py @@ -9,13 +9,14 @@ __author__ = "Samuel Stauffer " __version__ = "1.6.1" __license__ = "BSD" -__all__ = ['Scrubber', 'SelectiveScriptScrubber', 'ScrubberWarning', 'UnapprovedJavascript', 'urlize'] +__all__ = ["Scrubber", "SelectiveScriptScrubber", "ScrubberWarning", "UnapprovedJavascript", "urlize"] import re, string from urllib.parse import urljoin from itertools import chain from bs4 import BeautifulSoup, Comment + def urlize(text, trim_url_limit=None, nofollow=False, autoescape=False): """Converts any URLs in text into clickable links. @@ -30,42 +31,59 @@ def urlize(text, trim_url_limit=None, nofollow=False, autoescape=False): *Modified from Django* """ from urllib.parse import quote as urlquote - - LEADING_PUNCTUATION = ['(', '<', '<'] - TRAILING_PUNCTUATION = ['.', ',', ')', '>', '\n', '>'] - - word_split_re = re.compile(r'([\s\xa0]+| )') # a0 == NBSP - punctuation_re = re.compile('^(?P(?:%s)*)(?P.*?)(?P(?:%s)*)$' % \ - ('|'.join([re.escape(x) for x in LEADING_PUNCTUATION]), - '|'.join([re.escape(x) for x in TRAILING_PUNCTUATION]))) - simple_email_re = re.compile(r'^\S+@[a-zA-Z0-9._-]+\.[a-zA-Z0-9._-]+$') + + LEADING_PUNCTUATION = ["(", "<", "<"] + TRAILING_PUNCTUATION = [".", ",", ")", ">", "\n", ">"] + + word_split_re = re.compile(r"([\s\xa0]+| )") # a0 == NBSP + punctuation_re = re.compile( + "^(?P(?:%s)*)(?P.*?)(?P(?:%s)*)$" + % ( + "|".join([re.escape(x) for x in LEADING_PUNCTUATION]), + "|".join([re.escape(x) for x in TRAILING_PUNCTUATION]), + ) + ) + simple_email_re = re.compile(r"^\S+@[a-zA-Z0-9._-]+\.[a-zA-Z0-9._-]+$") # del x # Temporary variable def escape(html): - return html.replace('&', '&').replace('<', '<').replace('>', '>').replace('"', '"').replace("'", ''') - - trim_url = lambda x, limit=trim_url_limit: limit is not None and (len(x) > limit and ('%s...' % x[:max(0, limit - 3)])) or x + return ( + html.replace("&", "&") + .replace("<", "<") + .replace(">", ">") + .replace('"', """) + .replace("'", "'") + ) + + trim_url = ( + lambda x, limit=trim_url_limit: limit is not None + and (len(x) > limit and ("%s..." 
% x[: max(0, limit - 3)])) + or x + ) words = word_split_re.split(text) - nofollow_attr = nofollow and ' rel="nofollow"' or '' + nofollow_attr = nofollow and ' rel="nofollow"' or "" for i, word in enumerate(words): match = None - if '.' in word or '@' in word or ':' in word: - match = punctuation_re.match(word.replace('\u2019', "'")) + if "." in word or "@" in word or ":" in word: + match = punctuation_re.match(word.replace("\u2019", "'")) if match: lead, middle, trail = match.groups() - middle = middle.encode('utf-8') - middle = middle.decode('utf-8') # Bytes to str + middle = middle.encode("utf-8") + middle = middle.decode("utf-8") # Bytes to str # Make URL we want to point to. url = None - if middle.startswith('http://') or middle.startswith('https://'): - url = urlquote(middle, safe='%/&=:;#?+*') - elif middle.startswith('www.') or ('@' not in middle and \ - middle and middle[0] in string.ascii_letters + string.digits and \ - (middle.endswith('.org') or middle.endswith('.net') or middle.endswith('.com'))): - url = urlquote('http://%s' % middle, safe='%/&=:;#?+*') - elif '@' in middle and not ':' in middle and simple_email_re.match(middle): - url = 'mailto:%s' % middle - nofollow_attr = '' + if middle.startswith("http://") or middle.startswith("https://"): + url = urlquote(middle, safe="%/&=:;#?+*") + elif middle.startswith("www.") or ( + "@" not in middle + and middle + and middle[0] in string.ascii_letters + string.digits + and (middle.endswith(".org") or middle.endswith(".net") or middle.endswith(".com")) + ): + url = urlquote("http://%s" % middle, safe="%/&=:;#?+*") + elif "@" in middle and not ":" in middle and simple_email_re.match(middle): + url = "mailto:%s" % middle + nofollow_attr = "" # Make link. if url: trimmed = trim_url(middle) @@ -73,40 +91,117 @@ def escape(html): lead, trail = escape(lead), escape(trail) url, trimmed = escape(url), escape(trimmed) middle = '%s' % (url, nofollow_attr, trimmed) - words[i] = '%s%s%s' % (lead, middle, trail) + words[i] = "%s%s%s" % (lead, middle, trail) elif autoescape: words[i] = escape(word) elif autoescape: words[i] = escape(word) return "".join(words) - + + class ScrubberWarning(object): pass + class Scrubber(object): - allowed_tags = set(( - 'a', 'abbr', 'acronym', 'b', 'bdo', 'big', 'blockquote', 'br', - 'center', 'cite', 'code', - 'dd', 'del', 'dfn', 'div', 'dl', 'dt', 'em', 'embed', 'font', - 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'hr', 'i', 'img', 'ins', - 'kbd', 'li', 'object', 'ol', 'param', 'pre', 'p', 'q', - 's', 'samp', 'small', 'span', 'strike', 'strong', 'sub', 'sup', - 'table', 'tbody', 'td', 'th', 'thead', 'tr', 'tt', 'ul', 'u', - 'var', 'wbr', - )) - disallowed_tags_save_content = set(( - 'blink', 'body', 'html', - )) - allowed_attributes = set(( - 'align', 'alt', 'border', 'cite', 'class', 'dir', - 'height', 'href', 'src', 'style', 'title', 'type', 'width', - 'face', 'size', # font tags - 'flashvars', # Not sure about flashvars - if any harm can come from it - 'classid', # FF needs the classid on object tags for flash - 'name', 'value', 'quality', 'data', 'scale', # for flash embed param tags, could limit to just param if this is harmful - 'salign', 'align', 'wmode', - )) # Bad attributes: 'allowscriptaccess', 'xmlns', 'target' - normalized_tag_replacements = {'b': 'strong', 'i': 'em'} + allowed_tags = set( + ( + "a", + "abbr", + "acronym", + "b", + "bdo", + "big", + "blockquote", + "br", + "center", + "cite", + "code", + "dd", + "del", + "dfn", + "div", + "dl", + "dt", + "em", + "embed", + "font", + "h1", + "h2", + "h3", 
+ "h4", + "h5", + "h6", + "hr", + "i", + "img", + "ins", + "kbd", + "li", + "object", + "ol", + "param", + "pre", + "p", + "q", + "s", + "samp", + "small", + "span", + "strike", + "strong", + "sub", + "sup", + "table", + "tbody", + "td", + "th", + "thead", + "tr", + "tt", + "ul", + "u", + "var", + "wbr", + ) + ) + disallowed_tags_save_content = set( + ( + "blink", + "body", + "html", + ) + ) + allowed_attributes = set( + ( + "align", + "alt", + "border", + "cite", + "class", + "dir", + "height", + "href", + "src", + "style", + "title", + "type", + "width", + "face", + "size", # font tags + "flashvars", # Not sure about flashvars - if any harm can come from it + "classid", # FF needs the classid on object tags for flash + "name", + "value", + "quality", + "data", + "scale", # for flash embed param tags, could limit to just param if this is harmful + "salign", + "align", + "wmode", + ) + ) # Bad attributes: 'allowscriptaccess', 'xmlns', 'target' + normalized_tag_replacements = {"b": "strong", "i": "em"} def __init__(self, base_url=None, autolink=True, nofollow=True, remove_comments=True): self.base_url = base_url @@ -122,11 +217,12 @@ def __init__(self, base_url=None, autolink=True, nofollow=True, remove_comments= # Find all _scrub_tab_ methods self.tag_scrubbers = {} for k in chain(*[cls.__dict__ for cls in self.__class__.__mro__]): - if k.startswith('_scrub_tag_'): + if k.startswith("_scrub_tag_"): self.tag_scrubbers[k[11:]] = [getattr(self, k)] def autolink_soup(self, soup): """Autolink urls in text nodes that aren't already linked (inside anchor tags).""" + def _autolink(node): if isinstance(node, str): text = node @@ -139,6 +235,7 @@ def _autolink(node): for child in node.contents: _autolink(child) + _autolink(soup) def strip_disallowed(self, soup): @@ -159,7 +256,7 @@ def strip_disallowed(self, soup): # Remove disallowed attributes attrs = {} - if hasattr(node, 'attrs') and isinstance(node.attrs, dict): + if hasattr(node, "attrs") and isinstance(node.attrs, dict): for k, v in list(node.attrs.items()): if not v: continue @@ -170,7 +267,7 @@ def strip_disallowed(self, soup): # TODO: This probably needs to be more robust if isinstance(v, str): v2 = v.lower() - if any(x in v2 for x in ('javascript:', 'vbscript:', 'expression(')): + if any(x in v2 for x in ("javascript:", "vbscript:", "expression(")): continue attrs[k] = v @@ -190,46 +287,48 @@ def _remove_nodes(self, nodes): for keep_contentes, node in nodes: if keep_contentes and node.contents: idx = node.parent.contents.index(node) - for n in reversed(list(node.contents)): # Copy the contents list to avoid modifying while traversing + for n in reversed( + list(node.contents) + ): # Copy the contents list to avoid modifying while traversing node.parent.insert(idx, n) node.extract() def _clean_path(self, node, attrname): url = node.get(attrname) - if url and '://' not in url and not url.startswith('mailto:'): + if url and "://" not in url and not url.startswith("mailto:"): print(url) - if url[0] not in ('/', '.') and not self.base_url: + if url[0] not in ("/", ".") and not self.base_url: node[attrname] = "http://" + url - elif not url.startswith('http') and self.base_url: + elif not url.startswith("http") and self.base_url: print(self.base_url) node[attrname] = urljoin(self.base_url, url) def _scrub_tag_a(self, a): if self.nofollow: - a['rel'] = ["nofollow"] + a["rel"] = ["nofollow"] - if not a.get('class', None): - a['class'] = ["external"] + if not a.get("class", None): + a["class"] = ["external"] - self._clean_path(a, 'href') + 
self._clean_path(a, "href") def _scrub_tag_img(self, img): try: - if img['src'].lower().startswith('chrome://'): + if img["src"].lower().startswith("chrome://"): return True except KeyError: return True # Make sure images always have an 'alt' attribute - img['alt'] = img.get('alt', '') + img["alt"] = img.get("alt", "") - self._clean_path(img, 'src') + self._clean_path(img, "src") def _scrub_tag_font(self, node): attrs = {} - if hasattr(node, 'attrs') and isinstance(node.attrs, dict): + if hasattr(node, "attrs") and isinstance(node.attrs, dict): for k, v in list(node.attrs.items()): - if k.lower() == 'size' and v.startswith('+'): + if k.lower() == "size" and v.startswith("+"): # Remove "size=+0" continue attrs[k] = v @@ -277,49 +376,59 @@ def scrub(self, html): html = str(soup) return self._scrub_html_post(html) + class UnapprovedJavascript(ScrubberWarning): def __init__(self, src): self.src = src - self.path = src[:src.rfind('/')] + self.path = src[: src.rfind("/")] + class SelectiveScriptScrubber(Scrubber): - allowed_tags = Scrubber.allowed_tags | set(('script', 'noscript', 'iframe')) - allowed_attributes = Scrubber.allowed_attributes | set(('scrolling', 'frameborder')) + allowed_tags = Scrubber.allowed_tags | set(("script", "noscript", "iframe")) + allowed_attributes = Scrubber.allowed_attributes | set(("scrolling", "frameborder")) def __init__(self, *args, **kwargs): super(SelectiveScriptScrubber, self).__init__(*args, **kwargs) - self.allowed_script_srcs = set(( - 'http://www.statcounter.com/counter/counter_xhtml.js', - # 'http://www.google-analytics.com/urchin.js', - 'http://pub.mybloglog.com/', - 'http://rpc.bloglines.com/blogroll', - 'http://widget.blogrush.com/show.js', - 'http://re.adroll.com/', - 'http://widgetserver.com/', - 'http://pagead2.googlesyndication.com/pagead/show_ads.js', # are there pageadX for all kinds of numbers? - )) - - self.allowed_script_line_res = set(re.compile(text) for text in ( - r"^(var )?sc_project\=\d+;$", - r"^(var )?sc_invisible\=\d;$", - r"^(var )?sc_partition\=\d+;$", - r'^(var )?sc_security\="[A-Za-z0-9]+";$', - # """^_uacct \= "[^"]+";$""", - # """^urchinTracker\(\);$""", - r'^blogrush_feed = "[^"]+";$', - # """^!--$""", - # """^//-->$""", - )) - - self.allowed_iframe_srcs = set(re.compile(text) for text in ( - r'^http://www\.google\.com/calendar/embed\?[\w&;=\%]+$', # Google Calendar - r'^https?://www\.youtube\.com/', # YouTube - r'^http://player\.vimeo\.com/', # Vimeo - )) + self.allowed_script_srcs = set( + ( + "http://www.statcounter.com/counter/counter_xhtml.js", + # 'http://www.google-analytics.com/urchin.js', + "http://pub.mybloglog.com/", + "http://rpc.bloglines.com/blogroll", + "http://widget.blogrush.com/show.js", + "http://re.adroll.com/", + "http://widgetserver.com/", + "http://pagead2.googlesyndication.com/pagead/show_ads.js", # are there pageadX for all kinds of numbers? 
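# Illustrative extension point (not in this patch): because these allowlists
# are plain sets built in __init__, a subclass can widen them, e.g. to admit
# SoundCloud embeds alongside the YouTube and Vimeo iframe patterns below.
# The subclass name and URL pattern are hypothetical:
#
#     class PodcastScrubber(SelectiveScriptScrubber):
#         def __init__(self, *args, **kwargs):
#             super().__init__(*args, **kwargs)
#             self.allowed_iframe_srcs.add(
#                 re.compile(r"^https://w\.soundcloud\.com/player/")
#             )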
+ ) + ) + + self.allowed_script_line_res = set( + re.compile(text) + for text in ( + r"^(var )?sc_project\=\d+;$", + r"^(var )?sc_invisible\=\d;$", + r"^(var )?sc_partition\=\d+;$", + r'^(var )?sc_security\="[A-Za-z0-9]+";$', + # """^_uacct \= "[^"]+";$""", + # """^urchinTracker\(\);$""", + r'^blogrush_feed = "[^"]+";$', + # """^!--$""", + # """^//-->$""", + ) + ) + + self.allowed_iframe_srcs = set( + re.compile(text) + for text in ( + r"^http://www\.google\.com/calendar/embed\?[\w&;=\%]+$", # Google Calendar + r"^https?://www\.youtube\.com/", # YouTube + r"^http://player\.vimeo\.com/", # Vimeo + ) + ) def _scrub_tag_script(self, script): - src = script.get('src', None) + src = script.get("src", None) if src: for asrc in self.allowed_script_srcs: # TODO: It could be dangerous to only check "start" of string @@ -330,7 +439,7 @@ def _scrub_tag_script(self, script): else: self.warnings.append(UnapprovedJavascript(src)) script.extract() - elif script.get('type', '') != 'text/javascript': + elif script.get("type", "") != "text/javascript": script.extract() else: for line in script.string.splitlines(): @@ -345,6 +454,6 @@ def _scrub_tag_script(self, script): break def _scrub_tag_iframe(self, iframe): - src = iframe.get('src', None) + src = iframe.get("src", None) if not src or not any(asrc.match(src) for asrc in self.allowed_iframe_srcs): iframe.extract() diff --git a/utils/story_functions.py b/utils/story_functions.py index 57172d429d..dc5092f3ef 100644 --- a/utils/story_functions.py +++ b/utils/story_functions.py @@ -23,32 +23,37 @@ from hashlib import sha1 # COMMENTS_RE = re.compile('\') -COMMENTS_RE = re.compile('\ Following %s \t[%s]" % (hostname, address)) - if hostname in found: return - s = subprocess.Popen(["ssh", "-l", NEWSBLUR_USERNAME, - "-i", os.path.expanduser("/srv/secrets-newsblur/keys/docker.key"), - address, "%s %s" % (command, path)], stdout=subprocess.PIPE) + if hostname in found: + return + s = subprocess.Popen( + [ + "ssh", + "-l", + NEWSBLUR_USERNAME, + "-i", + os.path.expanduser("/srv/secrets-newsblur/keys/docker.key"), + address, + "%s %s" % (command, path), + ], + stdout=subprocess.PIPE, + ) s.name = hostname streams.append(s) found.add(hostname) + def read_streams(streams): while True: - r, _, _ = select.select( - [stream.stdout.fileno() for stream in streams], [], []) + r, _, _ = select.select([stream.stdout.fileno() for stream in streams], [], []) for fileno in r: for stream in streams: if stream.stdout.fileno() != fileno: @@ -137,11 +152,12 @@ def read_streams(streams): sys.stdout.flush() break + if __name__ == "__main__": - parser = argparse.ArgumentParser(description='Tail logs from multiple hosts.') - parser.add_argument('hostnames', help='Comma-separated list of hostnames', nargs='?') - parser.add_argument('roles', help='Comma-separated list of roles', nargs='?') - parser.add_argument('--command', help='Command to run on the remote host') - parser.add_argument('--path', help='Path to the log file') + parser = argparse.ArgumentParser(description="Tail logs from multiple hosts.") + parser.add_argument("hostnames", help="Comma-separated list of hostnames", nargs="?") + parser.add_argument("roles", help="Comma-separated list of roles", nargs="?") + parser.add_argument("--command", help="Command to run on the remote host") + parser.add_argument("--path", help="Path to the log file") args = parser.parse_args() main(args.hostnames, command=args.command, path=args.path) diff --git a/utils/tlnbt.py b/utils/tlnbt.py index 6d9f0f8f21..5a4a8f0901 100755 --- a/utils/tlnbt.py 
+++ b/utils/tlnbt.py @@ -8,4 +8,3 @@ if len(sys.argv) > 1: role = sys.argv[1] tlnb.main(roles=[role]) - \ No newline at end of file diff --git a/utils/tlnbw.py b/utils/tlnbw.py index 3a6b85dec5..9fafbccf2f 100755 --- a/utils/tlnbw.py +++ b/utils/tlnbw.py @@ -8,4 +8,3 @@ if len(sys.argv) > 1: role = sys.argv[1] tlnb.main(roles=[role]) - \ No newline at end of file diff --git a/utils/tornado_escape.py b/utils/tornado_escape.py index cf627a1cf6..cd4b2253fa 100644 --- a/utils/tornado_escape.py +++ b/utils/tornado_escape.py @@ -21,47 +21,52 @@ """ - import html.entities import re import sys -import urllib.parse +import urllib.parse from urllib.parse import parse_qs + # json module is in the standard library as of python 2.6; fall back to # simplejson if present for older versions. try: import json + assert hasattr(json, "loads") and hasattr(json, "dumps") _json_decode = json.loads _json_encode = json.dumps except Exception: try: import simplejson + _json_decode = lambda s: simplejson.loads(_unicode(s)) _json_encode = lambda v: simplejson.dumps(v) except ImportError: try: # For Google AppEngine from django.utils import simplejson + _json_decode = lambda s: simplejson.loads(_unicode(s)) _json_encode = lambda v: simplejson.dumps(v) except ImportError: + def _json_decode(s): raise NotImplementedError( "A JSON parser is required, e.g., simplejson at " - "http://pypi.python.org/pypi/simplejson/") + "http://pypi.python.org/pypi/simplejson/" + ) + _json_encode = _json_decode _XHTML_ESCAPE_RE = re.compile('[&<>"]') -_XHTML_ESCAPE_DICT = {'&': '&', '<': '<', '>': '>', '"': '"'} +_XHTML_ESCAPE_DICT = {"&": "&", "<": "<", ">": ">", '"': """} def xhtml_escape(value): """Escapes a string so it is valid within XML or XHTML.""" - return _XHTML_ESCAPE_RE.sub(lambda match: _XHTML_ESCAPE_DICT[match.group(0)], - to_basestring(value)) + return _XHTML_ESCAPE_RE.sub(lambda match: _XHTML_ESCAPE_DICT[match.group(0)], to_basestring(value)) def xhtml_unescape(value): @@ -94,11 +99,13 @@ def url_escape(value): """Returns a valid URL-encoded version of the given value.""" return urllib.parse.quote_plus(utf8(value)) + # python 3 changed things around enough that we need two separate # implementations of url_unescape. We also need our own implementation # of parse_qs since python 3's version insists on decoding everything. if sys.version_info[0] < 3: - def url_unescape(value, encoding='utf-8'): + + def url_unescape(value, encoding="utf-8"): """Decodes the given value from a URL. The argument may be either a byte or unicode string. @@ -113,7 +120,8 @@ def url_unescape(value, encoding='utf-8'): parse_qs_bytes = parse_qs else: - def url_unescape(value, encoding='utf-8'): + + def url_unescape(value, encoding="utf-8"): """Decodes the given value from a URL. The argument may be either a byte or unicode string. @@ -136,11 +144,10 @@ def parse_qs_bytes(qs, keep_blank_values=False, strict_parsing=False): """ # This is gross, but python3 doesn't give us another way. # Latin1 is the universal donor of character encodings. 
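# Concretely (illustrative, not from the patch): latin1 maps every byte
# 0x00-0xFF to exactly one code point, so decoding and re-encoding
# round-trips arbitrary bytes, which is what lets parse_qs_bytes recover
# the raw query-string bytes below:
#
#     raw = b"\xe9"                     # not valid UTF-8 on its own
#     assert raw.decode("latin1").encode("latin1") == raw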
diff --git a/utils/tornado_escape.py b/utils/tornado_escape.py
index cf627a1cf6..cd4b2253fa 100644
--- a/utils/tornado_escape.py
+++ b/utils/tornado_escape.py
@@ -21,47 +21,52 @@
 """

-
 import html.entities
 import re
 import sys
-import urllib.parse
+import urllib.parse
 from urllib.parse import parse_qs

+
 # json module is in the standard library as of python 2.6; fall back to
 # simplejson if present for older versions.
 try:
     import json
+
     assert hasattr(json, "loads") and hasattr(json, "dumps")
     _json_decode = json.loads
     _json_encode = json.dumps
 except Exception:
     try:
         import simplejson
+
         _json_decode = lambda s: simplejson.loads(_unicode(s))
         _json_encode = lambda v: simplejson.dumps(v)
     except ImportError:
         try:
             # For Google AppEngine
             from django.utils import simplejson
+
             _json_decode = lambda s: simplejson.loads(_unicode(s))
             _json_encode = lambda v: simplejson.dumps(v)
         except ImportError:
+
             def _json_decode(s):
                 raise NotImplementedError(
                     "A JSON parser is required, e.g., simplejson at "
-                    "http://pypi.python.org/pypi/simplejson/")
+                    "http://pypi.python.org/pypi/simplejson/"
+                )
+
             _json_encode = _json_decode


 _XHTML_ESCAPE_RE = re.compile('[&<>"]')
-_XHTML_ESCAPE_DICT = {'&': '&amp;', '<': '&lt;', '>': '&gt;', '"': '&quot;'}
+_XHTML_ESCAPE_DICT = {"&": "&amp;", "<": "&lt;", ">": "&gt;", '"': "&quot;"}


 def xhtml_escape(value):
     """Escapes a string so it is valid within XML or XHTML."""
-    return _XHTML_ESCAPE_RE.sub(lambda match: _XHTML_ESCAPE_DICT[match.group(0)],
-                                to_basestring(value))
+    return _XHTML_ESCAPE_RE.sub(lambda match: _XHTML_ESCAPE_DICT[match.group(0)], to_basestring(value))


 def xhtml_unescape(value):
@@ -94,11 +99,13 @@ def url_escape(value):
     """Returns a valid URL-encoded version of the given value."""
     return urllib.parse.quote_plus(utf8(value))

+
 # python 3 changed things around enough that we need two separate
 # implementations of url_unescape.  We also need our own implementation
 # of parse_qs since python 3's version insists on decoding everything.
 if sys.version_info[0] < 3:
-    def url_unescape(value, encoding='utf-8'):
+
+    def url_unescape(value, encoding="utf-8"):
         """Decodes the given value from a URL.

         The argument may be either a byte or unicode string.
@@ -113,7 +120,8 @@ def url_unescape(value, encoding='utf-8'):

     parse_qs_bytes = parse_qs
 else:
-    def url_unescape(value, encoding='utf-8'):
+
+    def url_unescape(value, encoding="utf-8"):
         """Decodes the given value from a URL.

         The argument may be either a byte or unicode string.
@@ -136,11 +144,10 @@ def parse_qs_bytes(qs, keep_blank_values=False, strict_parsing=False):
     """
     # This is gross, but python3 doesn't give us another way.
     # Latin1 is the universal donor of character encodings.
-    result = parse_qs(qs, keep_blank_values, strict_parsing,
-                      encoding='latin1', errors='strict')
+    result = parse_qs(qs, keep_blank_values, strict_parsing, encoding="latin1", errors="strict")
     encoded = {}
     for k, v in result.items():
-        encoded[k] = [i.encode('latin1') for i in v]
+        encoded[k] = [i.encode("latin1") for i in v]
     return encoded


@@ -158,6 +165,7 @@ def utf8(value):
         assert isinstance(value, str)
         return value.encode("utf-8")

+
 _TO_UNICODE_TYPES = (str, type(None))


@@ -172,6 +180,7 @@ def to_unicode(value):
     assert isinstance(value, bytes)
     return value.decode("utf-8")

+
 # to_unicode was previously named _unicode not because it was private,
 # but to avoid conflicts with the built-in unicode() function/type
 _unicode = to_unicode
@@ -217,16 +226,20 @@ def recursive_unicode(obj):
     else:
         return obj

+
 # I originally used the regex from
 # http://daringfireball.net/2010/07/improved_regex_for_matching_urls
 # but it gets all exponential on certain patterns (such as too many trailing
 # dots), causing the regex matcher to never return.
 # This regex should avoid those problems.
-_URL_RE = re.compile(r"""\b((?:([\w-]+):(/{1,3})|www[.])(?:(?:(?:[^\s&()]|&amp;|&quot;)*(?:[^!"#$%&'()*+,.:;<=>?@\[\]^`{|}~\s]))|(?:\((?:[^\s&()]|&amp;|&quot;)*\)))+)""")
+_URL_RE = re.compile(
+    r"""\b((?:([\w-]+):(/{1,3})|www[.])(?:(?:(?:[^\s&()]|&amp;|&quot;)*(?:[^!"#$%&'()*+,.:;<=>?@\[\]^`{|}~\s]))|(?:\((?:[^\s&()]|&amp;|&quot;)*\)))+)"""
+)


-def linkify(text, shorten=False, extra_params="",
-            require_protocol=False, permitted_protocols=["http", "https"]):
+def linkify(
+    text, shorten=False, extra_params="", require_protocol=False, permitted_protocols=["http", "https"]
+):
     """Converts plain text into HTML with links.

     For example: ``linkify("Hello http://tornadoweb.org!")`` would return
@@ -269,7 +282,7 @@ def make_link(m):
         href = m.group(1)

         if not proto:
-            href = "http://" + href   # no proto specified, use http
+            href = "http://" + href  # no proto specified, use http

         if callable(extra_params):
             params = " " + extra_params(href).strip()
@@ -291,14 +304,13 @@ def make_link(m):
             # The path is usually not that interesting once shortened
             # (no more slug, etc), so it really just provides a little
             # extra indication of shortening.
- url = url[:proto_len] + parts[0] + "/" + \ - parts[1][:8].split('?')[0].split('.')[0] + url = url[:proto_len] + parts[0] + "/" + parts[1][:8].split("?")[0].split(".")[0] if len(url) > max_len * 1.5: # still too long url = url[:max_len] if url != before_clip: - amp = url.rfind('&') + amp = url.rfind("&") # avoid splitting html char entities if amp > max_len - 5: url = url[:amp] @@ -338,4 +350,5 @@ def _build_unicode_map(): unicode_map[name] = chr(value) return unicode_map + _HTML_UNICODE_MAP = _build_unicode_map() diff --git a/utils/twitter_fetcher.py b/utils/twitter_fetcher.py index 356a3eb998..ee33d6cac7 100644 --- a/utils/twitter_fetcher.py +++ b/utils/twitter_fetcher.py @@ -14,13 +14,13 @@ from apps.reader.models import UserSubscription from utils import log as logging + class TwitterFetcher: - def __init__(self, feed, options=None): self.feed = feed self.address = self.feed.feed_address self.options = options or {} - + def fetch(self, address=None): data = {} if not address: @@ -28,133 +28,144 @@ def fetch(self, address=None): self.address = address twitter_user = None - if '/lists/' in address: + if "/lists/" in address: list_id = self.extract_list_id() if not list_id: return - + tweets, list_info = self.fetch_list_timeline(list_id) if not tweets: return - - data['title'] = "%s on Twitter" % list_info.full_name - data['link'] = "https://twitter.com%s" % list_info.uri - data['description'] = "%s on Twitter" % list_info.full_name - elif '/search' in address: + + data["title"] = "%s on Twitter" % list_info.full_name + data["link"] = "https://twitter.com%s" % list_info.uri + data["description"] = "%s on Twitter" % list_info.full_name + elif "/search" in address: search_query = self.extract_search_query() if not search_query: return - + tweets = self.fetch_search_query(search_query) if not tweets: return - - data['title'] = "\"%s\" on Twitter" % search_query - data['link'] = "%s" % address - data['description'] = "Searching \"%s\" on Twitter" % search_query + + data["title"] = '"%s" on Twitter' % search_query + data["link"] = "%s" % address + data["description"] = 'Searching "%s" on Twitter' % search_query else: username = self.extract_username() if not username: - logging.debug(u' ***> [%-30s] ~FRTwitter fetch failed: %s: No active user API access' % - (self.feed.log_title[:30], self.address)) + logging.debug( + " ***> [%-30s] ~FRTwitter fetch failed: %s: No active user API access" + % (self.feed.log_title[:30], self.address) + ) return - + twitter_user = self.fetch_user(username) if not twitter_user: return tweets = self.user_timeline(twitter_user) - - data['title'] = "%s on Twitter" % username - data['link'] = "https://twitter.com/%s" % username - data['description'] = "%s on Twitter" % username - - data['lastBuildDate'] = datetime.datetime.utcnow() - data['generator'] = 'NewsBlur Twitter API Decrapifier - %s' % settings.NEWSBLUR_URL - data['docs'] = None - data['feed_url'] = address + + data["title"] = "%s on Twitter" % username + data["link"] = "https://twitter.com/%s" % username + data["description"] = "%s on Twitter" % username + + data["lastBuildDate"] = datetime.datetime.utcnow() + data["generator"] = "NewsBlur Twitter API Decrapifier - %s" % settings.NEWSBLUR_URL + data["docs"] = None + data["feed_url"] = address rss = feedgenerator.Atom1Feed(**data) - + for tweet in tweets: story_data = self.tweet_story(tweet.__dict__) rss.add_item(**story_data) - - return rss.writeString('utf-8') - + + return rss.writeString("utf-8") + def extract_username(self): username = None try: - 
address = qurl(self.address, remove=['_']) - username_groups = re.search('twitter.com/(\w+)/?$', address) + address = qurl(self.address, remove=["_"]) + username_groups = re.search("twitter.com/(\w+)/?$", address) if not username_groups: return username = username_groups.group(1) except IndexError: return - + return username def extract_list_id(self): list_id = None try: - list_groups = re.search('twitter.com/i/lists/(\w+)/?', self.address) + list_groups = re.search("twitter.com/i/lists/(\w+)/?", self.address) if not list_groups: return list_id = list_groups.group(1) except IndexError: return - + return list_id def extract_search_query(self): search_query = None - address = qurl(self.address, remove=['_']) + address = qurl(self.address, remove=["_"]) query = urlparse(address).query query_dict = parse_qs(query) - if 'q' in query_dict: - search_query = query_dict['q'][0] - + if "q" in query_dict: + search_query = query_dict["q"][0] + return search_query def twitter_api(self, include_social_services=False): twitter_api = None social_services = None - if self.options.get('requesting_user_id', None): - social_services = MSocialServices.get_user(self.options.get('requesting_user_id')) + if self.options.get("requesting_user_id", None): + social_services = MSocialServices.get_user(self.options.get("requesting_user_id")) try: twitter_api = social_services.twitter_api() except tweepy.error.TweepError as e: - logging.debug(' ***> [%-30s] ~FRTwitter fetch failed: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + logging.debug( + " ***> [%-30s] ~FRTwitter fetch failed: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) return else: usersubs = UserSubscription.objects.filter(feed=self.feed) if not usersubs: - logging.debug(' ***> [%-30s] ~FRTwitter fetch failed: %s: No subscriptions' % - (self.feed.log_title[:30], self.address)) + logging.debug( + " ***> [%-30s] ~FRTwitter fetch failed: %s: No subscriptions" + % (self.feed.log_title[:30], self.address) + ) return for sub in usersubs: social_services = MSocialServices.get_user(sub.user_id) - if not social_services.twitter_uid: continue + if not social_services.twitter_uid: + continue try: twitter_api = social_services.twitter_api() - if not twitter_api: + if not twitter_api: continue else: break except tweepy.error.TweepError as e: - logging.debug(' ***> [%-30s] ~FRTwitter fetch failed: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + logging.debug( + " ***> [%-30s] ~FRTwitter fetch failed: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) continue - + if not twitter_api: - logging.debug(' ***> [%-30s] ~FRTwitter fetch failed: %s: No twitter API for %s' % - (self.feed.log_title[:30], self.address, usersubs[0].user.username)) + logging.debug( + " ***> [%-30s] ~FRTwitter fetch failed: %s: No twitter API for %s" + % (self.feed.log_title[:30], self.address, usersubs[0].user.username) + ) return - + if include_social_services: return twitter_api, social_services return twitter_api - + def disconnect_twitter(self): _, social_services = self.twitter_api(include_social_services=True) social_services.disconnect_twitter() @@ -163,298 +174,364 @@ def fetch_user(self, username): twitter_api = self.twitter_api() if not twitter_api: return - + try: twitter_user = twitter_api.get_user(username) except TypeError as e: - logging.debug(' ***> [%-30s] ~FRTwitter fetch failed, disconnecting twitter: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + logging.debug( + " ***> [%-30s] ~FRTwitter fetch failed, disconnecting twitter: 
%s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(560, "Twitter Error: %s" % (e)) return except tweepy.error.TweepError as e: message = str(e).lower() - if 'suspended' in message: - logging.debug(' ***> [%-30s] ~FRTwitter user suspended, disconnecting twitter: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + if "suspended" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter user suspended, disconnecting twitter: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(562, "Twitter Error: User suspended") # self.disconnect_twitter() return - elif 'expired token' in message: - logging.debug(' ***> [%-30s] ~FRTwitter user expired, disconnecting twitter: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + elif "expired token" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter user expired, disconnecting twitter: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(563, "Twitter Error: Expired token") self.disconnect_twitter() return - elif 'not found' in message: - logging.debug(' ***> [%-30s] ~FRTwitter user not found, disconnecting twitter: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + elif "not found" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter user not found, disconnecting twitter: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(564, "Twitter Error: User not found") return - elif 'not authenticate you' in message: - logging.debug(' ***> [%-30s] ~FRTwitter user not found, (not) disconnecting twitter: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + elif "not authenticate you" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter user not found, (not) disconnecting twitter: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(565, "Twitter Error: API not authorized") return - elif 'over capacity' in message or 'Max retries' in message: - logging.debug(' ***> [%-30s] ~FRTwitter over capacity, ignoring... %s: %s' % - (self.feed.log_title[:30], self.address, e)) + elif "over capacity" in message or "Max retries" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter over capacity, ignoring... %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(460, "Twitter Error: Over capacity") return - elif '503' in message: - logging.debug(' ***> [%-30s] ~FRTwitter throwing a 503, ignoring... %s: %s' % - (self.feed.log_title[:30], self.address, e)) + elif "503" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter throwing a 503, ignoring... 
%s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(463, "Twitter Error: Twitter's down") return else: raise e - + return twitter_user - + def user_timeline(self, twitter_user): try: - tweets = twitter_user.timeline(tweet_mode='extended') + tweets = twitter_user.timeline(tweet_mode="extended") except tweepy.error.TweepError as e: message = str(e).lower() - if 'not authorized' in message: - logging.debug(' ***> [%-30s] ~FRTwitter timeline failed, disconnecting twitter: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + if "not authorized" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter timeline failed, disconnecting twitter: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(565, "Twitter Error: Not authorized") return [] - elif 'user not found' in message: - logging.debug(' ***> [%-30s] ~FRTwitter user not found, disconnecting twitter: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + elif "user not found" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter user not found, disconnecting twitter: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(566, "Twitter Error: User not found") return [] - elif '429' in message: - logging.debug(' ***> [%-30s] ~FRTwitter rate limited: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + elif "429" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter rate limited: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(567, "Twitter Error: Rate limited") return [] - elif 'blocked from viewing' in message: - logging.debug(' ***> [%-30s] ~FRTwitter user blocked, ignoring: %s' % - (self.feed.log_title[:30], e)) + elif "blocked from viewing" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter user blocked, ignoring: %s" % (self.feed.log_title[:30], e) + ) self.feed.save_feed_history(568, "Twitter Error: Blocked from viewing") return [] - elif 'over capacity' in message: - logging.debug(u' ***> [%-30s] ~FRTwitter over capacity, ignoring: %s' % - (self.feed.log_title[:30], e)) + elif "over capacity" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter over capacity, ignoring: %s" % (self.feed.log_title[:30], e) + ) self.feed.save_feed_history(569, "Twitter Error: Over capacity") return [] else: raise e - + if not tweets: return [] return tweets - + def fetch_list_timeline(self, list_id): twitter_api = self.twitter_api() if not twitter_api: return None, None - + try: - list_timeline = twitter_api.list_timeline(list_id=list_id, tweet_mode='extended') + list_timeline = twitter_api.list_timeline(list_id=list_id, tweet_mode="extended") except TypeError as e: - logging.debug(' ***> [%-30s] ~FRTwitter list fetch failed, disconnecting twitter: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + logging.debug( + " ***> [%-30s] ~FRTwitter list fetch failed, disconnecting twitter: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(570, "Twitter Error: %s" % (e)) return None, None except tweepy.error.TweepError as e: message = str(e).lower() - if 'suspended' in message: - logging.debug(' ***> [%-30s] ~FRTwitter user suspended, disconnecting twitter: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + if "suspended" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter user suspended, disconnecting twitter: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(572, "Twitter Error: User suspended") 
# self.disconnect_twitter() return None, None - elif 'expired token' in message: - logging.debug(' ***> [%-30s] ~FRTwitter user expired, disconnecting twitter: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + elif "expired token" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter user expired, disconnecting twitter: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(573, "Twitter Error: Expired token") self.disconnect_twitter() return None, None - elif 'not found' in message: - logging.debug(' ***> [%-30s] ~FRTwitter user not found, disconnecting twitter: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + elif "not found" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter user not found, disconnecting twitter: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(574, "Twitter Error: User not found") return None, None - elif 'not authenticate you' in message: - logging.debug(' ***> [%-30s] ~FRTwitter user not found, (not) disconnecting twitter: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + elif "not authenticate you" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter user not found, (not) disconnecting twitter: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(565, "Twitter Error: API not authorized") return None, None - elif 'over capacity' in message or 'Max retries' in message: - logging.debug(' ***> [%-30s] ~FRTwitter over capacity, ignoring... %s: %s' % - (self.feed.log_title[:30], self.address, e)) + elif "over capacity" in message or "Max retries" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter over capacity, ignoring... %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(470, "Twitter Error: Over capacity") return None, None else: raise e - + list_info = twitter_api.get_list(list_id=list_id) - + if not list_timeline: return [], list_info return list_timeline, list_info - + def fetch_search_query(self, search_query): twitter_api = self.twitter_api() if not twitter_api: return None - + try: - list_timeline = twitter_api.search(search_query, tweet_mode='extended') + list_timeline = twitter_api.search(search_query, tweet_mode="extended") except TypeError as e: - logging.debug(' ***> [%-30s] ~FRTwitter list fetch failed, disconnecting twitter: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + logging.debug( + " ***> [%-30s] ~FRTwitter list fetch failed, disconnecting twitter: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(570, "Twitter Error: %s" % (e)) return None except tweepy.error.TweepError as e: message = str(e).lower() - if 'suspended' in message: - logging.debug(' ***> [%-30s] ~FRTwitter user suspended, disconnecting twitter: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + if "suspended" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter user suspended, disconnecting twitter: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(572, "Twitter Error: User suspended") # self.disconnect_twitter() return None - elif 'expired token' in message: - logging.debug(' ***> [%-30s] ~FRTwitter user expired, disconnecting twitter: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + elif "expired token" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter user expired, disconnecting twitter: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(573, 
"Twitter Error: Expired token") self.disconnect_twitter() return None - elif 'not found' in message: - logging.debug(' ***> [%-30s] ~FRTwitter user not found, disconnecting twitter: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + elif "not found" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter user not found, disconnecting twitter: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(574, "Twitter Error: User not found") return None - elif 'not authenticate you' in message: - logging.debug(' ***> [%-30s] ~FRTwitter user not found, (not) disconnecting twitter: %s: %s' % - (self.feed.log_title[:30], self.address, e)) + elif "not authenticate you" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter user not found, (not) disconnecting twitter: %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(565, "Twitter Error: API not authorized") return None - elif 'over capacity' in message or 'Max retries' in message: - logging.debug(' ***> [%-30s] ~FRTwitter over capacity, ignoring... %s: %s' % - (self.feed.log_title[:30], self.address, e)) + elif "over capacity" in message or "Max retries" in message: + logging.debug( + " ***> [%-30s] ~FRTwitter over capacity, ignoring... %s: %s" + % (self.feed.log_title[:30], self.address, e) + ) self.feed.save_feed_history(470, "Twitter Error: Over capacity") return None else: raise e - + if not list_timeline: return [] return list_timeline - + def tweet_story(self, user_tweet): categories = set() - - if user_tweet['full_text'].startswith('RT @'): - categories.add('retweet') - elif user_tweet['in_reply_to_status_id'] or user_tweet['full_text'].startswith('@'): - categories.add('reply') + + if user_tweet["full_text"].startswith("RT @"): + categories.add("retweet") + elif user_tweet["in_reply_to_status_id"] or user_tweet["full_text"].startswith("@"): + categories.add("reply") else: - categories.add('tweet') - if user_tweet['full_text'].startswith('RT @'): - categories.add('retweet') - if user_tweet['favorite_count']: - categories.add('liked') - if user_tweet['retweet_count']: - categories.add('retweeted') - if 'http' in user_tweet['full_text']: - categories.add('link') - + categories.add("tweet") + if user_tweet["full_text"].startswith("RT @"): + categories.add("retweet") + if user_tweet["favorite_count"]: + categories.add("liked") + if user_tweet["retweet_count"]: + categories.add("retweeted") + if "http" in user_tweet["full_text"]: + categories.add("link") + story = {} content_tweet = user_tweet entities = "" - author = user_tweet.get('author') or user_tweet.get('user') - if not isinstance(author, dict): author = author.__dict__ - author_screen_name = author['screen_name'] - author_name = author['name'] - author_fullname = "%s (%s)" % (author_name, author_screen_name) if author_screen_name != author_name else author_screen_name + author = user_tweet.get("author") or user_tweet.get("user") + if not isinstance(author, dict): + author = author.__dict__ + author_screen_name = author["screen_name"] + author_name = author["name"] + author_fullname = ( + "%s (%s)" % (author_name, author_screen_name) + if author_screen_name != author_name + else author_screen_name + ) original_author_screen_name = author_screen_name - if user_tweet['in_reply_to_user_id'] == author['id']: - categories.add('reply-to-self') + if user_tweet["in_reply_to_user_id"] == author["id"]: + categories.add("reply-to-self") retweet_author = "" - tweet_link = "https://twitter.com/%s/status/%s" % 
(original_author_screen_name, user_tweet['id'])
-        if 'retweeted_status' in user_tweet:
+        tweet_link = "https://twitter.com/%s/status/%s" % (original_author_screen_name, user_tweet["id"])
+        if "retweeted_status" in user_tweet:
             retweet_author = """Retweeted by <a href="https://twitter.com/%s"><img src="%s"></a> <a href="https://twitter.com/%s">%s</a> on %s""" % (
                 author_screen_name,
-                author['profile_image_url_https'],
+                author["profile_image_url_https"],
                 author_screen_name,
                 author_fullname,
-                DateFormat(user_tweet['created_at']).format('l, F jS, Y g:ia').replace('.',''),
-            )
-            content_tweet = user_tweet['retweeted_status'].__dict__
-            author = content_tweet['author']
-            if not isinstance(author, dict): author = author.__dict__
-            author_screen_name = author['screen_name']
-            author_name = author['name']
-            author_fullname = "%s (%s)" % (author_name, author_screen_name) if author_screen_name != author_name else author_screen_name
-            tweet_link = "https://twitter.com/%s/status/%s" % (author_screen_name, user_tweet['retweeted_status'].id)
-
-        tweet_title = user_tweet['full_text']
-        tweet_text = linebreaks(content_tweet['full_text'])
-
+                DateFormat(user_tweet["created_at"]).format("l, F jS, Y g:ia").replace(".", ""),
+            )
+            content_tweet = user_tweet["retweeted_status"].__dict__
+            author = content_tweet["author"]
+            if not isinstance(author, dict):
+                author = author.__dict__
+            author_screen_name = author["screen_name"]
+            author_name = author["name"]
+            author_fullname = (
+                "%s (%s)" % (author_name, author_screen_name)
+                if author_screen_name != author_name
+                else author_screen_name
+            )
+            tweet_link = "https://twitter.com/%s/status/%s" % (
+                author_screen_name,
+                user_tweet["retweeted_status"].id,
+            )
+
+        tweet_title = user_tweet["full_text"]
+        tweet_text = linebreaks(content_tweet["full_text"])
+
         replaced = {}
-        entities_media = content_tweet['entities'].get('media', [])
-        if 'extended_entities' in content_tweet:
-            entities_media = content_tweet['extended_entities'].get('media', [])
+        entities_media = content_tweet["entities"].get("media", [])
+        if "extended_entities" in content_tweet:
+            entities_media = content_tweet["extended_entities"].get("media", [])
         for media in entities_media:
-            if 'media_url_https' not in media: continue
-            if media['type'] == 'photo':
-                if media.get('url') and media['url'] in tweet_text:
-                    tweet_title = tweet_title.replace(media['url'], media['display_url'])
-                replacement = "<a href=\"%s\">%s</a>" % (media['expanded_url'], media['display_url'])
-                if not replaced.get(media['url']):
-                    tweet_text = tweet_text.replace(media['url'], replacement)
-                    replaced[media['url']] = True
-                entities += "<img src=\"%s\">" % media['media_url_https']
-                categories.add('photo')
-            if media['type'] == 'video' or media['type'] == 'animated_gif':
-                if media.get('url') and media['url'] in tweet_text:
-                    tweet_title = tweet_title.replace(media['url'], media['display_url'])
-                replacement = "<a href=\"%s\">%s</a>" % (media['expanded_url'], media['display_url'])
-                if not replaced.get(media['url']):
-                    tweet_text = tweet_text.replace(media['url'], replacement)
-                    replaced[media['url']] = True
+            if "media_url_https" not in media:
+                continue
+            if media["type"] == "photo":
+                if media.get("url") and media["url"] in tweet_text:
+                    tweet_title = tweet_title.replace(media["url"], media["display_url"])
+                replacement = '<a href="%s">%s</a>' % (media["expanded_url"], media["display_url"])
+                if not replaced.get(media["url"]):
+                    tweet_text = tweet_text.replace(media["url"], replacement)
+                    replaced[media["url"]] = True
+                entities += '<img src="%s">' % media["media_url_https"]
+                categories.add("photo")
+            if media["type"] == "video" or media["type"] == "animated_gif":
+                if media.get("url") and media["url"] in tweet_text:
+                    tweet_title = tweet_title.replace(media["url"], media["display_url"])
+                replacement = '<a href="%s">%s</a>' % (media["expanded_url"], media["display_url"])
+                if not replaced.get(media["url"]):
+                    tweet_text = tweet_text.replace(media["url"], replacement)
+                    replaced[media["url"]] = True
                 bitrate = 0
                 chosen_variant = None
-                for variant in media['video_info']['variants']:
+                for variant in media["video_info"]["variants"]:
                     if not chosen_variant:
                         chosen_variant = variant
-                    if variant.get('bitrate', 0) > bitrate:
-                        bitrate = variant['bitrate']
+                    if variant.get("bitrate", 0) > bitrate:
+                        bitrate = variant["bitrate"]
                         chosen_variant = variant
                 if chosen_variant:
-                    entities += "