issue_comments: 690860653

This data as json

html_url issue_url id node_id user created_at updated_at author_association body reactions issue performed_via_github_app
https://github.com/dogsheep/twitter-to-sqlite/issues/50#issuecomment-690860653 https://api.github.com/repos/dogsheep/twitter-to-sqlite/issues/50 690860653 MDEyOklzc3VlQ29tbWVudDY5MDg2MDY1Mw== 370930 2020-09-11T04:04:08Z 2020-09-11T04:04:08Z NONE

There's probably a nicer way of doing this (hence this is a comment rather than a PR), but this appears to fix it:

--- a/twitter_to_sqlite/utils.py
+++ b/twitter_to_sqlite/utils.py
@@ -181,6 +181,7 @@ def fetch_timeline(
     args["tweet_mode"] = "extended"
     min_seen_id = None
     num_rate_limit_errors = 0
+    seen_count = 0
     while True:
         if min_seen_id is not None:
             args["max_id"] = min_seen_id - 1
@@ -208,6 +209,7 @@ def fetch_timeline(
             yield tweet
         min_seen_id = min(t["id"] for t in tweets)
         max_seen_id = max(t["id"] for t in tweets)
+        seen_count += len(tweets)
         if last_since_id is not None:
             max_seen_id = max((last_since_id, max_seen_id))
             last_since_id = max_seen_id
@@ -217,7 +219,9 @@ def fetch_timeline(
                 replace=True,
             )
         if stop_after is not None:
-            break
+            if seen_count >= stop_after:
+                break
+            args["count"] = min(args["count"], stop_after - seen_count)
         time.sleep(sleep)
{
    "total_count": 0,
    "+1": 0,
    "-1": 0,
    "laugh": 0,
    "hooray": 0,
    "confused": 0,
    "heart": 0,
    "rocket": 0,
    "eyes": 0
}
698791218  
Powered by Datasette · Query took 1.291ms · About: github-to-sqlite