Diffstat (limited to 'mastodon')
-rw-r--r-- | mastodon/Mastodon.py  | 971
-rw-r--r-- | mastodon/compat.py    |  45
-rw-r--r-- | mastodon/defaults.py  |  64
-rw-r--r-- | mastodon/error.py     |  90
-rw-r--r-- | mastodon/internals.py | 664
-rw-r--r-- | mastodon/utility.py   |  77
6 files changed, 972 insertions, 939 deletions
diff --git a/mastodon/Mastodon.py b/mastodon/Mastodon.py
index 2074224..c454aa1 100644
--- a/mastodon/Mastodon.py
+++ b/mastodon/Mastodon.py
@@ -4,217 +4,46 @@ import json | |||
4 | import base64 | 4 | import base64 |
5 | import os | 5 | import os |
6 | import os.path | 6 | import os.path |
7 | import mimetypes | ||
8 | import time | 7 | import time |
9 | import random | ||
10 | import string | ||
11 | import datetime | 8 | import datetime |
12 | import collections | 9 | import collections |
13 | from contextlib import closing | ||
14 | import pytz | ||
15 | import requests | 10 | import requests |
16 | from requests.models import urlencode | 11 | from requests.models import urlencode |
17 | import dateutil | 12 | import dateutil |
18 | import dateutil.parser | 13 | import dateutil.parser |
19 | import re | 14 | import re |
20 | import copy | 15 | import copy |
21 | import threading | ||
22 | import sys | ||
23 | import six | ||
24 | import uuid | ||
25 | from decorator import decorate | ||
26 | import hashlib | ||
27 | |||
28 | IMPL_HAS_CRYPTO = True | ||
29 | try: | ||
30 | import cryptography | ||
31 | from cryptography.hazmat.backends import default_backend | ||
32 | from cryptography.hazmat.primitives.asymmetric import ec | ||
33 | from cryptography.hazmat.primitives import serialization | ||
34 | except: | ||
35 | IMPL_HAS_CRYPTO = False | ||
36 | |||
37 | IMPL_HAS_ECE = True | ||
38 | try: | ||
39 | import http_ece | ||
40 | except: | ||
41 | IMPL_HAS_ECE = False | ||
42 | |||
43 | IMPL_HAS_BLURHASH = True | ||
44 | try: | ||
45 | import blurhash | ||
46 | except: | ||
47 | IMPL_HAS_BLURHASH = False | ||
48 | |||
49 | try: | ||
50 | from urllib.parse import urlparse | ||
51 | except ImportError: | ||
52 | from urlparse import urlparse | ||
53 | |||
54 | try: | ||
55 | import magic | ||
56 | except ImportError: | ||
57 | magic = None | ||
58 | |||
59 | try: | ||
60 | from pathlib import PurePath | ||
61 | except: | ||
62 | class PurePath: | ||
63 | pass | ||
64 | 16 | ||
65 | ### | ||
66 | # Version check functions, including decorator and parser | ||
67 | ### | ||
68 | 17 | ||
18 | from .compat import IMPL_HAS_CRYPTO, IMPL_HAS_ECE, IMPL_HAS_BLURHASH | ||
19 | from .compat import cryptography, default_backend, ec, serialization, http_ece | ||
20 | from .compat import blurhash | ||
21 | from .compat import urlparse | ||
69 | 22 | ||
70 | def parse_version_string(version_string): | 23 | from .utility import parse_version_string, max_version, api_version |
71 | """Parses a semver version string, stripping off "rc" stuff if present.""" | 24 | from .utility import AttribAccessDict, AttribAccessDict |
72 | string_parts = version_string.split(".") | ||
73 | version_parts = ( | ||
74 | int(re.match("([0-9]*)", string_parts[0]).group(0)), | ||
75 | int(re.match("([0-9]*)", string_parts[1]).group(0)), | ||
76 | int(re.match("([0-9]*)", string_parts[2]).group(0)) | ||
77 | ) | ||
78 | return version_parts | ||
79 | |||
80 | def max_version(*version_strings): | ||
81 | """Returns the maximum version of all provided version strings.""" | ||
82 | return max(version_strings, key=parse_version_string) | ||
83 | |||
84 | def api_version(created_ver, last_changed_ver, return_value_ver): | ||
85 | """Version check decorator. Currently only checks Bigger Than.""" | ||
86 | def api_min_version_decorator(function): | ||
87 | def wrapper(function, self, *args, **kwargs): | ||
88 | if not self.version_check_mode == "none": | ||
89 | if self.version_check_mode == "created": | ||
90 | version = created_ver | ||
91 | else: | ||
92 | version = max_version(last_changed_ver, return_value_ver) | ||
93 | major, minor, patch = parse_version_string(version) | ||
94 | if major > self.mastodon_major: | ||
95 | raise MastodonVersionError("Version check failed (Need version " + version + ")") | ||
96 | elif major == self.mastodon_major and minor > self.mastodon_minor: | ||
97 | raise MastodonVersionError("Version check failed (Need version " + version + ")") | ||
98 | elif major == self.mastodon_major and minor == self.mastodon_minor and patch > self.mastodon_patch: | ||
99 | raise MastodonVersionError("Version check failed (Need version " + version + ", patch is " + str(self.mastodon_patch) + ")") | ||
100 | return function(self, *args, **kwargs) | ||
101 | function.__doc__ = function.__doc__ + "\n\n *Added: Mastodon v" + \ | ||
102 | created_ver + ", last changed: Mastodon v" + last_changed_ver + "*" | ||
103 | return decorate(function, wrapper) | ||
104 | return api_min_version_decorator | ||
105 | 25 | ||
106 | ### | 26 | from .error import * |
107 | # Dict helper class. | 27 | from .defaults import _DEFAULT_TIMEOUT, _DEFAULT_SCOPES, _DEFAULT_STREAM_TIMEOUT, _DEFAULT_STREAM_RECONNECT_WAIT_SEC |
108 | # Defined at top level so it can be pickled. | 28 | from .defaults import _SCOPE_SETS |
109 | ### | ||
110 | class AttribAccessDict(dict): | ||
111 | def __getattr__(self, attr): | ||
112 | if attr in self: | ||
113 | return self[attr] | ||
114 | else: | ||
115 | raise AttributeError("Attribute not found: " + str(attr)) | ||
116 | 29 | ||
117 | def __setattr__(self, attr, val): | 30 | from .internals import Mastodon as Internals |
118 | if attr in self: | ||
119 | raise AttributeError("Attribute-style access is read only") | ||
120 | super(AttribAccessDict, self).__setattr__(attr, val) | ||
121 | 31 | ||
122 | 32 | ## | |
123 | ### | ||
124 | # List helper class. | ||
125 | # Defined at top level so it can be pickled. | ||
126 | ### | ||
127 | class AttribAccessList(list): | ||
128 | def __getattr__(self, attr): | ||
129 | if attr in self: | ||
130 | return self[attr] | ||
131 | else: | ||
132 | raise AttributeError("Attribute not found: " + str(attr)) | ||
133 | |||
134 | def __setattr__(self, attr, val): | ||
135 | if attr in self: | ||
136 | raise AttributeError("Attribute-style access is read only") | ||
137 | super(AttribAccessList, self).__setattr__(attr, val) | ||
138 | |||
139 | |||
140 | ### | ||
141 | # The actual Mastodon class | 33 | # The actual Mastodon class |
142 | ### | 34 | ### |
143 | class Mastodon: | 35 | class Mastodon(Internals): |
144 | """ | 36 | """ |
145 | Thorough and easy to use Mastodon | 37 | Thorough and easy to use Mastodon |
146 | API wrapper in Python. | 38 | API wrapper in Python. |
147 | 39 | ||
148 | If anything is unclear, check the official API docs at | 40 | Main class, imports most things from modules |
149 | https://github.com/mastodon/documentation/blob/master/content/en/client/intro.md | ||
150 | """ | 41 | """ |
151 | __DEFAULT_TIMEOUT = 300 | 42 | |
152 | __DEFAULT_STREAM_TIMEOUT = 300 | 43 | # Support level |
153 | __DEFAULT_STREAM_RECONNECT_WAIT_SEC = 5 | 44 | __SUPPORTED_MASTODON_VERSION = "3.5.5" |
154 | __DEFAULT_SCOPES = ['read', 'write', 'follow', 'push'] | ||
155 | __SCOPE_SETS = { | ||
156 | 'read': [ | ||
157 | 'read:accounts', | ||
158 | 'read:blocks', | ||
159 | 'read:favourites', | ||
160 | 'read:filters', | ||
161 | 'read:follows', | ||
162 | 'read:lists', | ||
163 | 'read:mutes', | ||
164 | 'read:notifications', | ||
165 | 'read:search', | ||
166 | 'read:statuses', | ||
167 | 'read:bookmarks' | ||
168 | ], | ||
169 | 'write': [ | ||
170 | 'write:accounts', | ||
171 | 'write:blocks', | ||
172 | 'write:favourites', | ||
173 | 'write:filters', | ||
174 | 'write:follows', | ||
175 | 'write:lists', | ||
176 | 'write:media', | ||
177 | 'write:mutes', | ||
178 | 'write:notifications', | ||
179 | 'write:reports', | ||
180 | 'write:statuses', | ||
181 | 'write:bookmarks' | ||
182 | ], | ||
183 | 'follow': [ | ||
184 | 'read:blocks', | ||
185 | 'read:follows', | ||
186 | 'read:mutes', | ||
187 | 'write:blocks', | ||
188 | 'write:follows', | ||
189 | 'write:mutes', | ||
190 | ], | ||
191 | 'admin:read': [ | ||
192 | 'admin:read:accounts', | ||
193 | 'admin:read:reports', | ||
194 | 'admin:read:domain_allows', | ||
195 | 'admin:read:domain_blocks', | ||
196 | 'admin:read:ip_blocks', | ||
197 | 'admin:read:email_domain_blocks', | ||
198 | 'admin:read:canonical_email_blocks', | ||
199 | ], | ||
200 | 'admin:write': [ | ||
201 | 'admin:write:accounts', | ||
202 | 'admin:write:reports', | ||
203 | 'admin:write:domain_allows', | ||
204 | 'admin:write:domain_blocks', | ||
205 | 'admin:write:ip_blocks', | ||
206 | 'admin:write:email_domain_blocks', | ||
207 | 'admin:write:canonical_email_blocks', | ||
208 | ], | ||
209 | } | ||
210 | __VALID_SCOPES = ['read', 'write', 'follow', 'push', 'admin:read', 'admin:write'] + \ | ||
211 | __SCOPE_SETS['read'] + __SCOPE_SETS['write'] + \ | ||
212 | __SCOPE_SETS['admin:read'] + __SCOPE_SETS['admin:write'] | ||
213 | |||
214 | __SUPPORTED_MASTODON_VERSION = "3.5.4" | ||
215 | 45 | ||
216 | # Dict versions | 46 | # Dict versions |
217 | # Dict versions | ||
218 | __DICT_VERSION_APPLICATION = "2.7.2" | 47 | __DICT_VERSION_APPLICATION = "2.7.2" |
219 | __DICT_VERSION_MENTION = "1.0.0" | 48 | __DICT_VERSION_MENTION = "1.0.0" |
220 | __DICT_VERSION_MEDIA = "3.2.0" | 49 | __DICT_VERSION_MEDIA = "3.2.0" |
@@ -254,8 +83,8 @@ class Mastodon: | |||
254 | # Registering apps | 83 | # Registering apps |
255 | ### | 84 | ### |
256 | @staticmethod | 85 | @staticmethod |
257 | def create_app(client_name, scopes=__DEFAULT_SCOPES, redirect_uris=None, website=None, to_file=None, | 86 | def create_app(client_name, scopes=_DEFAULT_SCOPES, redirect_uris=None, website=None, to_file=None, |
258 | api_base_url=None, request_timeout=__DEFAULT_TIMEOUT, session=None): | 87 | api_base_url=None, request_timeout=_DEFAULT_TIMEOUT, session=None): |
259 | """ | 88 | """ |
260 | Create a new app with given `client_name` and `scopes` (The basic scopes are "read", "write", "follow" and "push" | 89 | Create a new app with given `client_name` and `scopes` (The basic scopes are "read", "write", "follow" and "push" |
261 | - more granular scopes are available, please refer to Mastodon documentation for which) on the instance given | 90 | - more granular scopes are available, please refer to Mastodon documentation for which) on the instance given |
@@ -317,7 +146,7 @@ class Mastodon: | |||
317 | # Authentication, including constructor | 146 | # Authentication, including constructor |
318 | ### | 147 | ### |
319 | def __init__(self, client_id=None, client_secret=None, access_token=None, api_base_url=None, debug_requests=False, | 148 | def __init__(self, client_id=None, client_secret=None, access_token=None, api_base_url=None, debug_requests=False, |
320 | ratelimit_method="wait", ratelimit_pacefactor=1.1, request_timeout=__DEFAULT_TIMEOUT, mastodon_version=None, | 149 | ratelimit_method="wait", ratelimit_pacefactor=1.1, request_timeout=_DEFAULT_TIMEOUT, mastodon_version=None, |
321 | version_check_mode="created", session=None, feature_set="mainline", user_agent="mastodonpy", lang=None): | 150 | version_check_mode="created", session=None, feature_set="mainline", user_agent="mastodonpy", lang=None): |
322 | """ | 151 | """ |
323 | Create a new API wrapper instance based on the given `client_secret` and `client_id` on the | 152 | Create a new API wrapper instance based on the given `client_secret` and `client_id` on the |
@@ -553,7 +382,7 @@ class Mastodon: | |||
553 | """ | 382 | """ |
554 | return Mastodon.__SUPPORTED_MASTODON_VERSION | 383 | return Mastodon.__SUPPORTED_MASTODON_VERSION |
555 | 384 | ||
556 | def auth_request_url(self, client_id=None, redirect_uris="urn:ietf:wg:oauth:2.0:oob", scopes=__DEFAULT_SCOPES, force_login=False, state=None, lang=None): | 385 | def auth_request_url(self, client_id=None, redirect_uris="urn:ietf:wg:oauth:2.0:oob", scopes=_DEFAULT_SCOPES, force_login=False, state=None, lang=None): |
557 | """ | 386 | """ |
558 | Returns the URL that a client needs to request an OAuth grant from the server. | 387 | Returns the URL that a client needs to request an OAuth grant from the server. |
559 | 388 | ||
@@ -593,7 +422,7 @@ class Mastodon: | |||
593 | formatted_params = urlencode(params) | 422 | formatted_params = urlencode(params) |
594 | return "".join([self.api_base_url, "/oauth/authorize?", formatted_params]) | 423 | return "".join([self.api_base_url, "/oauth/authorize?", formatted_params]) |
595 | 424 | ||
596 | def log_in(self, username=None, password=None, code=None, redirect_uri="urn:ietf:wg:oauth:2.0:oob", refresh_token=None, scopes=__DEFAULT_SCOPES, to_file=None): | 425 | def log_in(self, username=None, password=None, code=None, redirect_uri="urn:ietf:wg:oauth:2.0:oob", refresh_token=None, scopes=_DEFAULT_SCOPES, to_file=None): |
597 | """ | 426 | """ |
598 | Get the access token for a user. | 427 | Get the access token for a user. |
599 | 428 | ||
@@ -644,9 +473,9 @@ class Mastodon: | |||
644 | raise MastodonIllegalArgumentError('Invalid request: %s' % e) | 473 | raise MastodonIllegalArgumentError('Invalid request: %s' % e) |
645 | 474 | ||
646 | received_scopes = response["scope"].split(" ") | 475 | received_scopes = response["scope"].split(" ") |
647 | for scope_set in self.__SCOPE_SETS.keys(): | 476 | for scope_set in _SCOPE_SETS.keys(): |
648 | if scope_set in received_scopes: | 477 | if scope_set in received_scopes: |
649 | received_scopes += self.__SCOPE_SETS[scope_set] | 478 | received_scopes += _SCOPE_SETS[scope_set] |
650 | 479 | ||
651 | if not set(scopes) <= set(received_scopes): | 480 | if not set(scopes) <= set(received_scopes): |
652 | raise MastodonAPIError('Granted scopes "' + " ".join(received_scopes) + '" do not contain all of the requested scopes "' + " ".join(scopes) + '".') | 481 | raise MastodonAPIError('Granted scopes "' + " ".join(received_scopes) + '" do not contain all of the requested scopes "' + " ".join(scopes) + '".') |
@@ -687,7 +516,7 @@ class Mastodon: | |||
687 | self.__logged_in_id = None | 516 | self.__logged_in_id = None |
688 | 517 | ||
689 | @api_version("2.7.0", "2.7.0", "3.4.0") | 518 | @api_version("2.7.0", "2.7.0", "3.4.0") |
690 | def create_account(self, username, password, email, agreement=False, reason=None, locale="en", scopes=__DEFAULT_SCOPES, to_file=None, return_detailed_error=False): | 519 | def create_account(self, username, password, email, agreement=False, reason=None, locale="en", scopes=_DEFAULT_SCOPES, to_file=None, return_detailed_error=False): |
691 | """ | 520 | """ |
692 | Creates a new user account with the given username, password and email. "agreement" | 521 | Creates a new user account with the given username, password and email. "agreement" |
693 | must be set to true (after showing the user the instance's user agreement and having | 522 | must be set to true (after showing the user the instance's user agreement and having |
@@ -760,9 +589,9 @@ class Mastodon: | |||
760 | 589 | ||
761 | # Step 3: Check scopes, persist, et cetera | 590 | # Step 3: Check scopes, persist, et cetera |
762 | received_scopes = response["scope"].split(" ") | 591 | received_scopes = response["scope"].split(" ") |
763 | for scope_set in self.__SCOPE_SETS.keys(): | 592 | for scope_set in _SCOPE_SETS.keys(): |
764 | if scope_set in received_scopes: | 593 | if scope_set in received_scopes: |
765 | received_scopes += self.__SCOPE_SETS[scope_set] | 594 | received_scopes += _SCOPE_SETS[scope_set] |
766 | 595 | ||
767 | if not set(scopes) <= set(received_scopes): | 596 | if not set(scopes) <= set(received_scopes): |
768 | raise MastodonAPIError('Granted scopes "' + " ".join(received_scopes) + '" do not contain all of the requested scopes "' + " ".join(scopes) + '".') | 597 | raise MastodonAPIError('Granted scopes "' + " ".join(received_scopes) + '" do not contain all of the requested scopes "' + " ".join(scopes) + '".') |
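Both hunks above change the same scope-expansion check: a granted umbrella scope such as "read" now pulls its granular members from the module-level _SCOPE_SETS in defaults.py instead of the old class attribute. A minimal sketch of that check, using an abridged stand-in for _SCOPE_SETS (illustrative only, not the diff's code):

    _SCOPE_SETS = {"read": ["read:accounts", "read:statuses"]}  # abridged stand-in

    def scopes_satisfied(requested_scopes, granted_scopes):
        # Expand umbrella scopes into their granular members, then compare sets,
        # mirroring what log_in() and create_account() do before raising MastodonAPIError.
        granted = list(granted_scopes)
        for scope_set, members in _SCOPE_SETS.items():
            if scope_set in granted:
                granted += members
        return set(requested_scopes) <= set(granted)

    assert scopes_satisfied(["read:statuses"], ["read"])      # umbrella grant covers the member
    assert not scopes_satisfied(["write:media"], ["read"])    # missing scope -> error in the real code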
@@ -3853,7 +3682,7 @@ class Mastodon: | |||
3853 | # Streaming | 3682 | # Streaming |
3854 | ### | 3683 | ### |
3855 | @api_version("1.1.0", "1.4.2", __DICT_VERSION_STATUS) | 3684 | @api_version("1.1.0", "1.4.2", __DICT_VERSION_STATUS) |
3856 | def stream_user(self, listener, run_async=False, timeout=__DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=__DEFAULT_STREAM_RECONNECT_WAIT_SEC): | 3685 | def stream_user(self, listener, run_async=False, timeout=_DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=_DEFAULT_STREAM_RECONNECT_WAIT_SEC): |
3857 | """ | 3686 | """ |
3858 | Streams events that are relevant to the authorized user, i.e. home | 3687 | Streams events that are relevant to the authorized user, i.e. home |
3859 | timeline and notifications. | 3688 | timeline and notifications. |
@@ -3861,21 +3690,21 @@ class Mastodon: | |||
3861 | return self.__stream('/api/v1/streaming/user', listener, run_async=run_async, timeout=timeout, reconnect_async=reconnect_async, reconnect_async_wait_sec=reconnect_async_wait_sec) | 3690 | return self.__stream('/api/v1/streaming/user', listener, run_async=run_async, timeout=timeout, reconnect_async=reconnect_async, reconnect_async_wait_sec=reconnect_async_wait_sec) |
3862 | 3691 | ||
3863 | @api_version("1.1.0", "1.4.2", __DICT_VERSION_STATUS) | 3692 | @api_version("1.1.0", "1.4.2", __DICT_VERSION_STATUS) |
3864 | def stream_public(self, listener, run_async=False, timeout=__DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=__DEFAULT_STREAM_RECONNECT_WAIT_SEC): | 3693 | def stream_public(self, listener, run_async=False, timeout=_DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=_DEFAULT_STREAM_RECONNECT_WAIT_SEC): |
3865 | """ | 3694 | """ |
3866 | Streams public events. | 3695 | Streams public events. |
3867 | """ | 3696 | """ |
3868 | return self.__stream('/api/v1/streaming/public', listener, run_async=run_async, timeout=timeout, reconnect_async=reconnect_async, reconnect_async_wait_sec=reconnect_async_wait_sec) | 3697 | return self.__stream('/api/v1/streaming/public', listener, run_async=run_async, timeout=timeout, reconnect_async=reconnect_async, reconnect_async_wait_sec=reconnect_async_wait_sec) |
3869 | 3698 | ||
3870 | @api_version("1.1.0", "1.4.2", __DICT_VERSION_STATUS) | 3699 | @api_version("1.1.0", "1.4.2", __DICT_VERSION_STATUS) |
3871 | def stream_local(self, listener, run_async=False, timeout=__DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=__DEFAULT_STREAM_RECONNECT_WAIT_SEC): | 3700 | def stream_local(self, listener, run_async=False, timeout=_DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=_DEFAULT_STREAM_RECONNECT_WAIT_SEC): |
3872 | """ | 3701 | """ |
3873 | Streams local public events. | 3702 | Streams local public events. |
3874 | """ | 3703 | """ |
3875 | return self.__stream('/api/v1/streaming/public/local', listener, run_async=run_async, timeout=timeout, reconnect_async=reconnect_async, reconnect_async_wait_sec=reconnect_async_wait_sec) | 3704 | return self.__stream('/api/v1/streaming/public/local', listener, run_async=run_async, timeout=timeout, reconnect_async=reconnect_async, reconnect_async_wait_sec=reconnect_async_wait_sec) |
3876 | 3705 | ||
3877 | @api_version("1.1.0", "1.4.2", __DICT_VERSION_STATUS) | 3706 | @api_version("1.1.0", "1.4.2", __DICT_VERSION_STATUS) |
3878 | def stream_hashtag(self, tag, listener, local=False, run_async=False, timeout=__DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=__DEFAULT_STREAM_RECONNECT_WAIT_SEC): | 3707 | def stream_hashtag(self, tag, listener, local=False, run_async=False, timeout=_DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=_DEFAULT_STREAM_RECONNECT_WAIT_SEC): |
3879 | """ | 3708 | """ |
3880 | Stream for all public statuses for the hashtag 'tag' seen by the connected | 3709 | Stream for all public statuses for the hashtag 'tag' seen by the connected |
3881 | instance. | 3710 | instance. |
@@ -3891,7 +3720,7 @@ class Mastodon: | |||
3891 | return self.__stream("{}?tag={}".format(base, tag), listener, run_async=run_async, timeout=timeout, reconnect_async=reconnect_async, reconnect_async_wait_sec=reconnect_async_wait_sec) | 3720 | return self.__stream("{}?tag={}".format(base, tag), listener, run_async=run_async, timeout=timeout, reconnect_async=reconnect_async, reconnect_async_wait_sec=reconnect_async_wait_sec) |
3892 | 3721 | ||
3893 | @api_version("2.1.0", "2.1.0", __DICT_VERSION_STATUS) | 3722 | @api_version("2.1.0", "2.1.0", __DICT_VERSION_STATUS) |
3894 | def stream_list(self, id, listener, run_async=False, timeout=__DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=__DEFAULT_STREAM_RECONNECT_WAIT_SEC): | 3723 | def stream_list(self, id, listener, run_async=False, timeout=_DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=_DEFAULT_STREAM_RECONNECT_WAIT_SEC): |
3895 | """ | 3724 | """ |
3896 | Stream events for the current user, restricted to accounts on the given | 3725 | Stream events for the current user, restricted to accounts on the given |
3897 | list. | 3726 | list. |
@@ -3900,7 +3729,7 @@ class Mastodon: | |||
3900 | return self.__stream("/api/v1/streaming/list?list={}".format(id), listener, run_async=run_async, timeout=timeout, reconnect_async=reconnect_async, reconnect_async_wait_sec=reconnect_async_wait_sec) | 3729 | return self.__stream("/api/v1/streaming/list?list={}".format(id), listener, run_async=run_async, timeout=timeout, reconnect_async=reconnect_async, reconnect_async_wait_sec=reconnect_async_wait_sec) |
3901 | 3730 | ||
3902 | @api_version("2.6.0", "2.6.0", __DICT_VERSION_STATUS) | 3731 | @api_version("2.6.0", "2.6.0", __DICT_VERSION_STATUS) |
3903 | def stream_direct(self, listener, run_async=False, timeout=__DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=__DEFAULT_STREAM_RECONNECT_WAIT_SEC): | 3732 | def stream_direct(self, listener, run_async=False, timeout=_DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=_DEFAULT_STREAM_RECONNECT_WAIT_SEC): |
3904 | """ | 3733 | """ |
3905 | Streams direct message events for the logged-in user, as conversation events. | 3734 | Streams direct message events for the logged-in user, as conversation events. |
3906 | """ | 3735 | """ |
@@ -3916,739 +3745,3 @@ class Mastodon: | |||
3916 | return True | 3745 | return True |
3917 | return False | 3746 | return False |
3918 | 3747 | ||
3919 | ### | ||
3920 | # Internal helpers, dragons probably | ||
3921 | ### | ||
3922 | def __datetime_to_epoch(self, date_time): | ||
3923 | """ | ||
3924 | Converts a python datetime to unix epoch, accounting for | ||
3925 | time zones and such. | ||
3926 | |||
3927 | Assumes UTC if timezone is not given. | ||
3928 | """ | ||
3929 | date_time_utc = None | ||
3930 | if date_time.tzinfo is None: | ||
3931 | date_time_utc = date_time.replace(tzinfo=pytz.utc) | ||
3932 | else: | ||
3933 | date_time_utc = date_time.astimezone(pytz.utc) | ||
3934 | |||
3935 | epoch_utc = datetime.datetime.utcfromtimestamp(0).replace(tzinfo=pytz.utc) | ||
3936 | |||
3937 | return (date_time_utc - epoch_utc).total_seconds() | ||
3938 | |||
3939 | def __get_logged_in_id(self): | ||
3940 | """ | ||
3941 | Fetch the logged in user's ID, with caching. ID is reset on calls to log_in. | ||
3942 | """ | ||
3943 | if self.__logged_in_id is None: | ||
3944 | self.__logged_in_id = self.account_verify_credentials().id | ||
3945 | return self.__logged_in_id | ||
3946 | |||
3947 | @staticmethod | ||
3948 | def __json_allow_dict_attrs(json_object): | ||
3949 | """ | ||
3950 | Makes it possible to use attribute notation to access a dicts | ||
3951 | elements, while still allowing the dict to act as a dict. | ||
3952 | """ | ||
3953 | if isinstance(json_object, dict): | ||
3954 | return AttribAccessDict(json_object) | ||
3955 | return json_object | ||
3956 | |||
3957 | @staticmethod | ||
3958 | def __json_date_parse(json_object): | ||
3959 | """ | ||
3960 | Parse dates in certain known json fields, if possible. | ||
3961 | """ | ||
3962 | known_date_fields = ["created_at", "week", "day", "expires_at", "scheduled_at", | ||
3963 | "updated_at", "last_status_at", "starts_at", "ends_at", "published_at", "edited_at"] | ||
3964 | mark_delete = [] | ||
3965 | for k, v in json_object.items(): | ||
3966 | if k in known_date_fields: | ||
3967 | if v is not None: | ||
3968 | try: | ||
3969 | if isinstance(v, int): | ||
3970 | json_object[k] = datetime.datetime.fromtimestamp(v, pytz.utc) | ||
3971 | else: | ||
3972 | json_object[k] = dateutil.parser.parse(v) | ||
3973 | except: | ||
3974 | # When we can't parse a date, we just leave the field out | ||
3975 | mark_delete.append(k) | ||
3976 | # Two step process because otherwise python gets very upset | ||
3977 | for k in mark_delete: | ||
3978 | del json_object[k] | ||
3979 | return json_object | ||
3980 | |||
3981 | @staticmethod | ||
3982 | def __json_truefalse_parse(json_object): | ||
3983 | """ | ||
3984 | Parse 'True' / 'False' strings in certain known fields | ||
3985 | """ | ||
3986 | for key in ('follow', 'favourite', 'reblog', 'mention'): | ||
3987 | if (key in json_object and isinstance(json_object[key], six.text_type)): | ||
3988 | if json_object[key].lower() == 'true': | ||
3989 | json_object[key] = True | ||
3990 | if json_object[key].lower() == 'false': | ||
3991 | json_object[key] = False | ||
3992 | return json_object | ||
3993 | |||
3994 | @staticmethod | ||
3995 | def __json_strnum_to_bignum(json_object): | ||
3996 | """ | ||
3997 | Converts json string numerals to native python bignums. | ||
3998 | """ | ||
3999 | for key in ('id', 'week', 'in_reply_to_id', 'in_reply_to_account_id', 'logins', 'registrations', 'statuses', 'day', 'last_read_id'): | ||
4000 | if (key in json_object and isinstance(json_object[key], six.text_type)): | ||
4001 | try: | ||
4002 | json_object[key] = int(json_object[key]) | ||
4003 | except ValueError: | ||
4004 | pass | ||
4005 | |||
4006 | return json_object | ||
4007 | |||
4008 | @staticmethod | ||
4009 | def __json_hooks(json_object): | ||
4010 | """ | ||
4011 | All the json hooks. Used in request parsing. | ||
4012 | """ | ||
4013 | json_object = Mastodon.__json_strnum_to_bignum(json_object) | ||
4014 | json_object = Mastodon.__json_date_parse(json_object) | ||
4015 | json_object = Mastodon.__json_truefalse_parse(json_object) | ||
4016 | json_object = Mastodon.__json_allow_dict_attrs(json_object) | ||
4017 | return json_object | ||
4018 | |||
4019 | @staticmethod | ||
4020 | def __consistent_isoformat_utc(datetime_val): | ||
4021 | """ | ||
4022 | Function that does what isoformat does but it actually does the same | ||
4023 | every time instead of randomly doing different things on some systems | ||
4024 | and also it represents that time as the equivalent UTC time. | ||
4025 | """ | ||
4026 | isotime = datetime_val.astimezone(pytz.utc).strftime("%Y-%m-%dT%H:%M:%S%z") | ||
4027 | if isotime[-2] != ":": | ||
4028 | isotime = isotime[:-2] + ":" + isotime[-2:] | ||
4029 | return isotime | ||
4030 | |||
4031 | def __api_request(self, method, endpoint, params={}, files={}, headers={}, access_token_override=None, base_url_override=None, | ||
4032 | do_ratelimiting=True, use_json=False, parse=True, return_response_object=False, skip_error_check=False, lang_override=None): | ||
4033 | """ | ||
4034 | Internal API request helper. | ||
4035 | """ | ||
4036 | response = None | ||
4037 | remaining_wait = 0 | ||
4038 | |||
4039 | # Add language to params if not None | ||
4040 | lang = self.lang | ||
4041 | if lang_override is not None: | ||
4042 | lang = lang_override | ||
4043 | if lang is not None: | ||
4044 | params["lang"] = lang | ||
4045 | |||
4046 | # "pace" mode ratelimiting: Assume constant rate of requests, sleep a little less long than it | ||
4047 | # would take to not hit the rate limit at that request rate. | ||
4048 | if do_ratelimiting and self.ratelimit_method == "pace": | ||
4049 | if self.ratelimit_remaining == 0: | ||
4050 | to_next = self.ratelimit_reset - time.time() | ||
4051 | if to_next > 0: | ||
4052 | # As a precaution, never sleep longer than 5 minutes | ||
4053 | to_next = min(to_next, 5 * 60) | ||
4054 | time.sleep(to_next) | ||
4055 | else: | ||
4056 | time_waited = time.time() - self.ratelimit_lastcall | ||
4057 | time_wait = float(self.ratelimit_reset - time.time()) / float(self.ratelimit_remaining) | ||
4058 | remaining_wait = time_wait - time_waited | ||
4059 | |||
4060 | if remaining_wait > 0: | ||
4061 | to_next = remaining_wait / self.ratelimit_pacefactor | ||
4062 | to_next = min(to_next, 5 * 60) | ||
4063 | time.sleep(to_next) | ||
4064 | |||
4065 | # Generate request headers | ||
4066 | headers = copy.deepcopy(headers) | ||
4067 | if self.access_token is not None: | ||
4068 | headers['Authorization'] = 'Bearer ' + self.access_token | ||
4069 | if access_token_override is not None: | ||
4070 | headers['Authorization'] = 'Bearer ' + access_token_override | ||
4071 | |||
4072 | # Add user-agent | ||
4073 | if self.user_agent: | ||
4074 | headers['User-Agent'] = self.user_agent | ||
4075 | |||
4076 | # Determine base URL | ||
4077 | base_url = self.api_base_url | ||
4078 | if base_url_override is not None: | ||
4079 | base_url = base_url_override | ||
4080 | |||
4081 | if self.debug_requests: | ||
4082 | print('Mastodon: Request to endpoint "' + base_url + | ||
4083 | endpoint + '" using method "' + method + '".') | ||
4084 | print('Parameters: ' + str(params)) | ||
4085 | print('Headers: ' + str(headers)) | ||
4086 | print('Files: ' + str(files)) | ||
4087 | |||
4088 | # Make request | ||
4089 | request_complete = False | ||
4090 | while not request_complete: | ||
4091 | request_complete = True | ||
4092 | |||
4093 | response_object = None | ||
4094 | try: | ||
4095 | kwargs = dict(headers=headers, files=files, timeout=self.request_timeout) | ||
4096 | if use_json: | ||
4097 | kwargs['json'] = params | ||
4098 | elif method == 'GET': | ||
4099 | kwargs['params'] = params | ||
4100 | else: | ||
4101 | kwargs['data'] = params | ||
4102 | |||
4103 | response_object = self.session.request(method, base_url + endpoint, **kwargs) | ||
4104 | except Exception as e: | ||
4105 | raise MastodonNetworkError( | ||
4106 | "Could not complete request: %s" % e) | ||
4107 | |||
4108 | if response_object is None: | ||
4109 | raise MastodonIllegalArgumentError("Illegal request.") | ||
4110 | |||
4111 | # Parse rate limiting headers | ||
4112 | if 'X-RateLimit-Remaining' in response_object.headers and do_ratelimiting: | ||
4113 | self.ratelimit_remaining = int( | ||
4114 | response_object.headers['X-RateLimit-Remaining']) | ||
4115 | self.ratelimit_limit = int( | ||
4116 | response_object.headers['X-RateLimit-Limit']) | ||
4117 | |||
4118 | # For gotosocial, we need an int representation, but for non-ints this would crash | ||
4119 | try: | ||
4120 | ratelimit_intrep = str( | ||
4121 | int(response_object.headers['X-RateLimit-Reset'])) | ||
4122 | except: | ||
4123 | ratelimit_intrep = None | ||
4124 | |||
4125 | try: | ||
4126 | if ratelimit_intrep is not None and ratelimit_intrep == response_object.headers['X-RateLimit-Reset']: | ||
4127 | self.ratelimit_reset = int( | ||
4128 | response_object.headers['X-RateLimit-Reset']) | ||
4129 | else: | ||
4130 | ratelimit_reset_datetime = dateutil.parser.parse(response_object.headers['X-RateLimit-Reset']) | ||
4131 | self.ratelimit_reset = self.__datetime_to_epoch(ratelimit_reset_datetime) | ||
4132 | |||
4133 | # Adjust server time to local clock | ||
4134 | if 'Date' in response_object.headers: | ||
4135 | server_time_datetime = dateutil.parser.parse(response_object.headers['Date']) | ||
4136 | server_time = self.__datetime_to_epoch(server_time_datetime) | ||
4137 | server_time_diff = time.time() - server_time | ||
4138 | self.ratelimit_reset += server_time_diff | ||
4139 | self.ratelimit_lastcall = time.time() | ||
4140 | except Exception as e: | ||
4141 | raise MastodonRatelimitError( | ||
4142 | "Rate limit time calculations failed: %s" % e) | ||
4143 | |||
4144 | # Handle response | ||
4145 | if self.debug_requests: | ||
4146 | print('Mastodon: Response received with code ' + str(response_object.status_code) + '.') | ||
4147 | print('response headers: ' + str(response_object.headers)) | ||
4148 | print('Response text content: ' + str(response_object.text)) | ||
4149 | |||
4150 | if not response_object.ok: | ||
4151 | try: | ||
4152 | response = response_object.json(object_hook=self.__json_hooks) | ||
4153 | if isinstance(response, dict) and 'error' in response: | ||
4154 | error_msg = response['error'] | ||
4155 | elif isinstance(response, str): | ||
4156 | error_msg = response | ||
4157 | else: | ||
4158 | error_msg = None | ||
4159 | except ValueError: | ||
4160 | error_msg = None | ||
4161 | |||
4162 | # Handle rate limiting | ||
4163 | if response_object.status_code == 429: | ||
4164 | if self.ratelimit_method == 'throw' or not do_ratelimiting: | ||
4165 | raise MastodonRatelimitError('Hit rate limit.') | ||
4166 | elif self.ratelimit_method in ('wait', 'pace'): | ||
4167 | to_next = self.ratelimit_reset - time.time() | ||
4168 | if to_next > 0: | ||
4169 | # As a precaution, never sleep longer than 5 minutes | ||
4170 | to_next = min(to_next, 5 * 60) | ||
4171 | time.sleep(to_next) | ||
4172 | request_complete = False | ||
4173 | continue | ||
4174 | |||
4175 | if not skip_error_check: | ||
4176 | if response_object.status_code == 404: | ||
4177 | ex_type = MastodonNotFoundError | ||
4178 | if not error_msg: | ||
4179 | error_msg = 'Endpoint not found.' | ||
4180 | # this is for compatibility with older versions | ||
4181 | # which raised MastodonAPIError('Endpoint not found.') | ||
4182 | # on any 404 | ||
4183 | elif response_object.status_code == 401: | ||
4184 | ex_type = MastodonUnauthorizedError | ||
4185 | elif response_object.status_code == 500: | ||
4186 | ex_type = MastodonInternalServerError | ||
4187 | elif response_object.status_code == 502: | ||
4188 | ex_type = MastodonBadGatewayError | ||
4189 | elif response_object.status_code == 503: | ||
4190 | ex_type = MastodonServiceUnavailableError | ||
4191 | elif response_object.status_code == 504: | ||
4192 | ex_type = MastodonGatewayTimeoutError | ||
4193 | elif response_object.status_code >= 500 and \ | ||
4194 | response_object.status_code <= 511: | ||
4195 | ex_type = MastodonServerError | ||
4196 | else: | ||
4197 | ex_type = MastodonAPIError | ||
4198 | |||
4199 | raise ex_type('Mastodon API returned error', response_object.status_code, response_object.reason, error_msg) | ||
4200 | |||
4201 | if return_response_object: | ||
4202 | return response_object | ||
4203 | |||
4204 | if parse: | ||
4205 | try: | ||
4206 | response = response_object.json(object_hook=self.__json_hooks) | ||
4207 | except: | ||
4208 | raise MastodonAPIError( | ||
4209 | "Could not parse response as JSON, response code was %s, " | ||
4210 | "bad json content was '%s'" % (response_object.status_code, | ||
4211 | response_object.content)) | ||
4212 | else: | ||
4213 | response = response_object.content | ||
4214 | |||
4215 | # Parse link headers | ||
4216 | if isinstance(response, list) and \ | ||
4217 | 'Link' in response_object.headers and \ | ||
4218 | response_object.headers['Link'] != "": | ||
4219 | response = AttribAccessList(response) | ||
4220 | tmp_urls = requests.utils.parse_header_links( | ||
4221 | response_object.headers['Link'].rstrip('>').replace('>,<', ',<')) | ||
4222 | for url in tmp_urls: | ||
4223 | if 'rel' not in url: | ||
4224 | continue | ||
4225 | |||
4226 | if url['rel'] == 'next': | ||
4227 | # Be paranoid and extract max_id specifically | ||
4228 | next_url = url['url'] | ||
4229 | matchgroups = re.search( | ||
4230 | r"[?&]max_id=([^&]+)", next_url) | ||
4231 | |||
4232 | if matchgroups: | ||
4233 | next_params = copy.deepcopy(params) | ||
4234 | next_params['_pagination_method'] = method | ||
4235 | next_params['_pagination_endpoint'] = endpoint | ||
4236 | max_id = matchgroups.group(1) | ||
4237 | if max_id.isdigit(): | ||
4238 | next_params['max_id'] = int(max_id) | ||
4239 | else: | ||
4240 | next_params['max_id'] = max_id | ||
4241 | if "since_id" in next_params: | ||
4242 | del next_params['since_id'] | ||
4243 | if "min_id" in next_params: | ||
4244 | del next_params['min_id'] | ||
4245 | response._pagination_next = next_params | ||
4246 | |||
4247 | # Maybe other API users rely on the pagination info in the last item | ||
4248 | # Will be removed in future | ||
4249 | if isinstance(response[-1], AttribAccessDict): | ||
4250 | response[-1]._pagination_next = next_params | ||
4251 | |||
4252 | if url['rel'] == 'prev': | ||
4253 | # Be paranoid and extract since_id or min_id specifically | ||
4254 | prev_url = url['url'] | ||
4255 | |||
4256 | # Old and busted (pre-2.6.0): since_id pagination | ||
4257 | matchgroups = re.search( | ||
4258 | r"[?&]since_id=([^&]+)", prev_url) | ||
4259 | if matchgroups: | ||
4260 | prev_params = copy.deepcopy(params) | ||
4261 | prev_params['_pagination_method'] = method | ||
4262 | prev_params['_pagination_endpoint'] = endpoint | ||
4263 | since_id = matchgroups.group(1) | ||
4264 | if since_id.isdigit(): | ||
4265 | prev_params['since_id'] = int(since_id) | ||
4266 | else: | ||
4267 | prev_params['since_id'] = since_id | ||
4268 | if "max_id" in prev_params: | ||
4269 | del prev_params['max_id'] | ||
4270 | response._pagination_prev = prev_params | ||
4271 | |||
4272 | # Maybe other API users rely on the pagination info in the first item | ||
4273 | # Will be removed in future | ||
4274 | if isinstance(response[0], AttribAccessDict): | ||
4275 | response[0]._pagination_prev = prev_params | ||
4276 | |||
4277 | # New and fantastico (post-2.6.0): min_id pagination | ||
4278 | matchgroups = re.search( | ||
4279 | r"[?&]min_id=([^&]+)", prev_url) | ||
4280 | if matchgroups: | ||
4281 | prev_params = copy.deepcopy(params) | ||
4282 | prev_params['_pagination_method'] = method | ||
4283 | prev_params['_pagination_endpoint'] = endpoint | ||
4284 | min_id = matchgroups.group(1) | ||
4285 | if min_id.isdigit(): | ||
4286 | prev_params['min_id'] = int(min_id) | ||
4287 | else: | ||
4288 | prev_params['min_id'] = min_id | ||
4289 | if "max_id" in prev_params: | ||
4290 | del prev_params['max_id'] | ||
4291 | response._pagination_prev = prev_params | ||
4292 | |||
4293 | # Maybe other API users rely on the pagination info in the first item | ||
4294 | # Will be removed in future | ||
4295 | if isinstance(response[0], AttribAccessDict): | ||
4296 | response[0]._pagination_prev = prev_params | ||
4297 | |||
4298 | return response | ||
4299 | |||
4300 | def __get_streaming_base(self): | ||
4301 | """ | ||
4302 | Internal streaming API helper. | ||
4303 | |||
4304 | Returns the correct URL for the streaming API. | ||
4305 | """ | ||
4306 | instance = self.instance() | ||
4307 | if "streaming_api" in instance["urls"] and instance["urls"]["streaming_api"] != self.api_base_url: | ||
4308 | # This is probably a websockets URL, which is really for the browser, but requests can't handle it | ||
4309 | # So we do this below to turn it into an HTTPS or HTTP URL | ||
4310 | parse = urlparse(instance["urls"]["streaming_api"]) | ||
4311 | if parse.scheme == 'wss': | ||
4312 | url = "https://" + parse.netloc | ||
4313 | elif parse.scheme == 'ws': | ||
4314 | url = "http://" + parse.netloc | ||
4315 | else: | ||
4316 | raise MastodonAPIError( | ||
4317 | "Could not parse streaming api location returned from server: {}.".format( | ||
4318 | instance["urls"]["streaming_api"])) | ||
4319 | else: | ||
4320 | url = self.api_base_url | ||
4321 | return url | ||
4322 | |||
4323 | def __stream(self, endpoint, listener, params={}, run_async=False, timeout=__DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=__DEFAULT_STREAM_RECONNECT_WAIT_SEC): | ||
4324 | """ | ||
4325 | Internal streaming API helper. | ||
4326 | |||
4327 | Returns a handle to the open connection that the user can close if they | ||
4328 | wish to terminate it. | ||
4329 | """ | ||
4330 | |||
4331 | # Check if we have to redirect | ||
4332 | url = self.__get_streaming_base() | ||
4333 | |||
4334 | # The streaming server can't handle two slashes in a path, so remove trailing slashes | ||
4335 | if url[-1] == '/': | ||
4336 | url = url[:-1] | ||
4337 | |||
4338 | # Connect function (called and then potentially passed to async handler) | ||
4339 | def connect_func(): | ||
4340 | headers = {"Authorization": "Bearer " + | ||
4341 | self.access_token} if self.access_token else {} | ||
4342 | if self.user_agent: | ||
4343 | headers['User-Agent'] = self.user_agent | ||
4344 | connection = self.session.get(url + endpoint, headers=headers, data=params, stream=True, | ||
4345 | timeout=(self.request_timeout, timeout)) | ||
4346 | |||
4347 | if connection.status_code != 200: | ||
4348 | raise MastodonNetworkError( | ||
4349 | "Could not connect to streaming server: %s" % connection.reason) | ||
4350 | return connection | ||
4351 | connection = None | ||
4352 | |||
4353 | # Async stream handler | ||
4354 | class __stream_handle(): | ||
4355 | def __init__(self, connection, connect_func, reconnect_async, reconnect_async_wait_sec): | ||
4356 | self.closed = False | ||
4357 | self.running = True | ||
4358 | self.connection = connection | ||
4359 | self.connect_func = connect_func | ||
4360 | self.reconnect_async = reconnect_async | ||
4361 | self.reconnect_async_wait_sec = reconnect_async_wait_sec | ||
4362 | self.reconnecting = False | ||
4363 | |||
4364 | def close(self): | ||
4365 | self.closed = True | ||
4366 | if self.connection is not None: | ||
4367 | self.connection.close() | ||
4368 | |||
4369 | def is_alive(self): | ||
4370 | return self._thread.is_alive() | ||
4371 | |||
4372 | def is_receiving(self): | ||
4373 | if self.closed or not self.running or self.reconnecting or not self.is_alive(): | ||
4374 | return False | ||
4375 | else: | ||
4376 | return True | ||
4377 | |||
4378 | def _sleep_attentive(self): | ||
4379 | if self._thread != threading.current_thread(): | ||
4380 | raise RuntimeError( | ||
4381 | "Illegal call from outside the stream_handle thread") | ||
4382 | time_remaining = self.reconnect_async_wait_sec | ||
4383 | while time_remaining > 0 and not self.closed: | ||
4384 | time.sleep(0.5) | ||
4385 | time_remaining -= 0.5 | ||
4386 | |||
4387 | def _threadproc(self): | ||
4388 | self._thread = threading.current_thread() | ||
4389 | |||
4390 | # Run until closed or until error if not autoreconnecting | ||
4391 | while self.running: | ||
4392 | if self.connection is not None: | ||
4393 | with closing(self.connection) as r: | ||
4394 | try: | ||
4395 | listener.handle_stream(r) | ||
4396 | except (AttributeError, MastodonMalformedEventError, MastodonNetworkError) as e: | ||
4397 | if not (self.closed or self.reconnect_async): | ||
4398 | raise e | ||
4399 | else: | ||
4400 | if self.closed: | ||
4401 | self.running = False | ||
4402 | |||
4403 | # Reconnect loop. Try immediately once, then with delays on error. | ||
4404 | if (self.reconnect_async and not self.closed) or self.connection is None: | ||
4405 | self.reconnecting = True | ||
4406 | connect_success = False | ||
4407 | while not connect_success: | ||
4408 | if self.closed: | ||
4409 | # Someone from outside stopped the streaming | ||
4410 | self.running = False | ||
4411 | break | ||
4412 | try: | ||
4413 | the_connection = self.connect_func() | ||
4414 | if the_connection.status_code != 200: | ||
4415 | exception = MastodonNetworkError(f"Could not connect to server. " | ||
4416 | f"HTTP status: {the_connection.status_code}") | ||
4417 | listener.on_abort(exception) | ||
4418 | self._sleep_attentive() | ||
4419 | if self.closed: | ||
4420 | # Here we have maybe a rare race condition. Exactly on connect, someone | ||
4421 | # stopped the streaming before. We close the previous established connection: | ||
4422 | the_connection.close() | ||
4423 | else: | ||
4424 | self.connection = the_connection | ||
4425 | connect_success = True | ||
4426 | except: | ||
4427 | self._sleep_attentive() | ||
4428 | connect_success = False | ||
4429 | self.reconnecting = False | ||
4430 | else: | ||
4431 | self.running = False | ||
4432 | return 0 | ||
4433 | |||
4434 | if run_async: | ||
4435 | handle = __stream_handle( | ||
4436 | connection, connect_func, reconnect_async, reconnect_async_wait_sec) | ||
4437 | t = threading.Thread(args=(), target=handle._threadproc) | ||
4438 | t.daemon = True | ||
4439 | t.start() | ||
4440 | return handle | ||
4441 | else: | ||
4442 | # Blocking, never returns (can only leave via exception) | ||
4443 | connection = connect_func() | ||
4444 | with closing(connection) as r: | ||
4445 | listener.handle_stream(r) | ||
4446 | |||
4447 | def __generate_params(self, params, exclude=[]): | ||
4448 | """ | ||
4449 | Internal named-parameters-to-dict helper. | ||
4450 | |||
4451 | Note for developers: If called with locals() as params, | ||
4452 | as is the usual practice in this code, the __generate_params call | ||
4453 | (or at least the locals() call) should generally be the first thing | ||
4454 | in your function. | ||
4455 | """ | ||
4456 | params = collections.OrderedDict(params) | ||
4457 | |||
4458 | if 'self' in params: | ||
4459 | del params['self'] | ||
4460 | |||
4461 | param_keys = list(params.keys()) | ||
4462 | for key in param_keys: | ||
4463 | if isinstance(params[key], bool): | ||
4464 | params[key] = '1' if params[key] else '0' | ||
4465 | |||
4466 | for key in param_keys: | ||
4467 | if params[key] is None or key in exclude: | ||
4468 | del params[key] | ||
4469 | |||
4470 | param_keys = list(params.keys()) | ||
4471 | for key in param_keys: | ||
4472 | if isinstance(params[key], list): | ||
4473 | params[key + "[]"] = params[key] | ||
4474 | del params[key] | ||
4475 | |||
4476 | return params | ||
4477 | |||
4478 | def __unpack_id(self, id, dateconv=False): | ||
4479 | """ | ||
4480 | Internal object-to-id converter | ||
4481 | |||
4482 | Checks if id is a dict that contains id and | ||
4483 | returns the id inside, otherwise just returns | ||
4484 | the id straight. | ||
4485 | |||
4486 | Also unpacks datetimes to snowflake IDs if requested. | ||
4487 | """ | ||
4488 | if isinstance(id, dict) and "id" in id: | ||
4489 | id = id["id"] | ||
4490 | if dateconv and isinstance(id, datetime.datetime): | ||
4491 | id = (int(id.timestamp()) << 16) * 1000 | ||
4492 | return id | ||
4493 | |||
4494 | def __decode_webpush_b64(self, data): | ||
4495 | """ | ||
4496 | Re-pads and decodes urlsafe base64. | ||
4497 | """ | ||
4498 | missing_padding = len(data) % 4 | ||
4499 | if missing_padding != 0: | ||
4500 | data += '=' * (4 - missing_padding) | ||
4501 | return base64.urlsafe_b64decode(data) | ||
4502 | |||
4503 | def __get_token_expired(self): | ||
4504 | """Internal helper for oauth code""" | ||
4505 | return self._token_expired < datetime.datetime.now() | ||
4506 | |||
4507 | def __set_token_expired(self, value): | ||
4508 | """Internal helper for oauth code""" | ||
4509 | self._token_expired = datetime.datetime.now() + datetime.timedelta(seconds=value) | ||
4510 | return | ||
4511 | |||
4512 | def __get_refresh_token(self): | ||
4513 | """Internal helper for oauth code""" | ||
4514 | return self._refresh_token | ||
4515 | |||
4516 | def __set_refresh_token(self, value): | ||
4517 | """Internal helper for oauth code""" | ||
4518 | self._refresh_token = value | ||
4519 | return | ||
4520 | |||
4521 | def __guess_type(self, media_file): | ||
4522 | """Internal helper to guess media file type""" | ||
4523 | mime_type = None | ||
4524 | try: | ||
4525 | mime_type = magic.from_file(media_file, mime=True) | ||
4526 | except AttributeError: | ||
4527 | mime_type = mimetypes.guess_type(media_file)[0] | ||
4528 | return mime_type | ||
4529 | |||
4530 | def __load_media_file(self, media_file, mime_type=None, file_name=None): | ||
4531 | if isinstance(media_file, PurePath): | ||
4532 | media_file = str(media_file) | ||
4533 | if isinstance(media_file, str) and os.path.isfile(media_file): | ||
4534 | mime_type = self.__guess_type(media_file) | ||
4535 | media_file = open(media_file, 'rb') | ||
4536 | elif isinstance(media_file, str) and os.path.isfile(media_file): | ||
4537 | media_file = open(media_file, 'rb') | ||
4538 | if mime_type is None: | ||
4539 | raise MastodonIllegalArgumentError( | ||
4540 | 'Could not determine mime type or data passed directly without mime type.') | ||
4541 | if file_name is None: | ||
4542 | random_suffix = uuid.uuid4().hex | ||
4543 | file_name = "mastodonpyupload_" + \ | ||
4544 | str(time.time()) + "_" + str(random_suffix) + \ | ||
4545 | mimetypes.guess_extension(mime_type) | ||
4546 | return (file_name, media_file, mime_type) | ||
4547 | |||
4548 | @staticmethod | ||
4549 | def __protocolize(base_url): | ||
4550 | """Internal add-protocol-to-url helper""" | ||
4551 | if not base_url.startswith("http://") and not base_url.startswith("https://"): | ||
4552 | base_url = "https://" + base_url | ||
4553 | |||
4554 | # Some API endpoints can't handle extra /'s in path requests | ||
4555 | base_url = base_url.rstrip("/") | ||
4556 | return base_url | ||
4557 | |||
4558 | @staticmethod | ||
4559 | def __deprotocolize(base_url): | ||
4560 | """Internal helper to strip http and https from a URL""" | ||
4561 | if base_url.startswith("http://"): | ||
4562 | base_url = base_url[7:] | ||
4563 | elif base_url.startswith("https://") or base_url.startswith("onion://"): | ||
4564 | base_url = base_url[8:] | ||
4565 | return base_url | ||
4566 | |||
4567 | ## | ||
4568 | # Exceptions | ||
4569 | ## | ||
4570 | class MastodonError(Exception): | ||
4571 | """Base class for Mastodon.py exceptions""" | ||
4572 | |||
4573 | |||
4574 | class MastodonVersionError(MastodonError): | ||
4575 | """Raised when a function is called that the version of Mastodon for which | ||
4576 | Mastodon.py was instantiated does not support""" | ||
4577 | |||
4578 | |||
4579 | class MastodonIllegalArgumentError(ValueError, MastodonError): | ||
4580 | """Raised when an incorrect parameter is passed to a function""" | ||
4581 | pass | ||
4582 | |||
4583 | |||
4584 | class MastodonIOError(IOError, MastodonError): | ||
4585 | """Base class for Mastodon.py I/O errors""" | ||
4586 | |||
4587 | |||
4588 | class MastodonFileNotFoundError(MastodonIOError): | ||
4589 | """Raised when a file requested to be loaded can not be opened""" | ||
4590 | pass | ||
4591 | |||
4592 | |||
4593 | class MastodonNetworkError(MastodonIOError): | ||
4594 | """Raised when network communication with the server fails""" | ||
4595 | pass | ||
4596 | |||
4597 | |||
4598 | class MastodonReadTimeout(MastodonNetworkError): | ||
4599 | """Raised when a stream times out""" | ||
4600 | pass | ||
4601 | |||
4602 | |||
4603 | class MastodonAPIError(MastodonError): | ||
4604 | """Raised when the mastodon API generates a response that cannot be handled""" | ||
4605 | pass | ||
4606 | |||
4607 | |||
4608 | class MastodonServerError(MastodonAPIError): | ||
4609 | """Raised if the Server is malconfigured and returns a 5xx error code""" | ||
4610 | pass | ||
4611 | |||
4612 | |||
4613 | class MastodonInternalServerError(MastodonServerError): | ||
4614 | """Raised if the Server returns a 500 error""" | ||
4615 | pass | ||
4616 | |||
4617 | |||
4618 | class MastodonBadGatewayError(MastodonServerError): | ||
4619 | """Raised if the Server returns a 502 error""" | ||
4620 | pass | ||
4621 | |||
4622 | |||
4623 | class MastodonServiceUnavailableError(MastodonServerError): | ||
4624 | """Raised if the Server returns a 503 error""" | ||
4625 | pass | ||
4626 | |||
4627 | |||
4628 | class MastodonGatewayTimeoutError(MastodonServerError): | ||
4629 | """Raised if the Server returns a 504 error""" | ||
4630 | pass | ||
4631 | |||
4632 | |||
4633 | class MastodonNotFoundError(MastodonAPIError): | ||
4634 | """Raised when the Mastodon API returns a 404 Not Found error""" | ||
4635 | pass | ||
4636 | |||
4637 | |||
4638 | class MastodonUnauthorizedError(MastodonAPIError): | ||
4639 | """Raised when the Mastodon API returns a 401 Unauthorized error | ||
4640 | |||
4641 | This happens when an OAuth token is invalid or has been revoked, | ||
4642 | or when trying to access an endpoint that can't be used without | ||
4643 | authentication without providing credentials.""" | ||
4644 | pass | ||
4645 | |||
4646 | |||
4647 | class MastodonRatelimitError(MastodonError): | ||
4648 | """Raised when rate limiting is set to manual mode and the rate limit is exceeded""" | ||
4649 | pass | ||
4650 | |||
4651 | |||
4652 | class MastodonMalformedEventError(MastodonError): | ||
4653 | """Raised when the server-sent event stream is malformed""" | ||
4654 | pass | ||
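The net effect of the Mastodon.py changes above: the private helpers and exception classes move out to their own modules, and the public class now inherits its plumbing via class Mastodon(Internals), where Internals is the same-named class defined in internals.py. A rough sketch of that pattern (names below are illustrative, not the actual module contents):

    # internals.py defines the shared plumbing under the same class name ...
    class Internals:
        def _request(self, method, endpoint):
            ...  # request / ratelimit / parsing helpers live here

    # ... and Mastodon.py imports it as the base of the public class.
    class Mastodon(Internals):
        def instance(self):
            return self._request("GET", "/api/v1/instance")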
diff --git a/mastodon/compat.py b/mastodon/compat.py
new file mode 100644
index 0000000..905bfa7
--- /dev/null
+++ b/mastodon/compat.py
@@ -0,0 +1,45 @@ | |||
1 | # compat.py - backwards compatible optional imports | ||
2 | |||
3 | IMPL_HAS_CRYPTO = True | ||
4 | try: | ||
5 | import cryptography | ||
6 | from cryptography.hazmat.backends import default_backend | ||
7 | from cryptography.hazmat.primitives.asymmetric import ec | ||
8 | from cryptography.hazmat.primitives import serialization | ||
9 | except: | ||
10 | IMPL_HAS_CRYPTO = False | ||
11 | cryptography = None | ||
12 | default_backend = None | ||
13 | ec = None | ||
14 | serialization = None | ||
15 | |||
16 | IMPL_HAS_ECE = True | ||
17 | try: | ||
18 | import http_ece | ||
19 | except: | ||
20 | IMPL_HAS_ECE = False | ||
21 | http_ece = None | ||
22 | |||
23 | IMPL_HAS_BLURHASH = True | ||
24 | try: | ||
25 | import blurhash | ||
26 | except: | ||
27 | IMPL_HAS_BLURHASH = False | ||
28 | blurhash = None | ||
29 | |||
30 | try: | ||
31 | from urllib.parse import urlparse | ||
32 | except ImportError: | ||
33 | from urlparse import urlparse | ||
34 | |||
35 | try: | ||
36 | import magic | ||
37 | except ImportError: | ||
38 | magic = None | ||
39 | |||
40 | try: | ||
41 | from pathlib import PurePath | ||
42 | except: | ||
43 | class PurePath: | ||
44 | pass | ||
45 | |||
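compat.py centralises the optional imports: each soft dependency yields both a module reference (possibly None) and an IMPL_HAS_* flag, and calling code is expected to test the flag before touching the corresponding feature. A hedged sketch of such a guard (the helper functions below are hypothetical, not part of the diff):

    from mastodon.compat import IMPL_HAS_CRYPTO, IMPL_HAS_ECE, IMPL_HAS_BLURHASH

    def require_push_crypto():
        # Web-push payload handling needs both the 'cryptography' and 'http_ece' extras.
        if not (IMPL_HAS_CRYPTO and IMPL_HAS_ECE):
            raise RuntimeError("Install 'cryptography' and 'http_ece' to use push payload encryption.")

    def can_decode_blurhash():
        # Blurhash decoding is likewise optional.
        return IMPL_HAS_BLURHASH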
diff --git a/mastodon/defaults.py b/mastodon/defaults.py
new file mode 100644
index 0000000..950ffa2
--- /dev/null
+++ b/mastodon/defaults.py
@@ -0,0 +1,64 @@ | |||
1 | # defaults.py - default values for various parameters | ||
2 | |||
3 | _DEFAULT_TIMEOUT = 300 | ||
4 | _DEFAULT_STREAM_TIMEOUT = 300 | ||
5 | _DEFAULT_STREAM_RECONNECT_WAIT_SEC = 5 | ||
6 | _DEFAULT_SCOPES = ['read', 'write', 'follow', 'push'] | ||
7 | _SCOPE_SETS = { | ||
8 | 'read': [ | ||
9 | 'read:accounts', | ||
10 | 'read:blocks', | ||
11 | 'read:favourites', | ||
12 | 'read:filters', | ||
13 | 'read:follows', | ||
14 | 'read:lists', | ||
15 | 'read:mutes', | ||
16 | 'read:notifications', | ||
17 | 'read:search', | ||
18 | 'read:statuses', | ||
19 | 'read:bookmarks' | ||
20 | ], | ||
21 | 'write': [ | ||
22 | 'write:accounts', | ||
23 | 'write:blocks', | ||
24 | 'write:favourites', | ||
25 | 'write:filters', | ||
26 | 'write:follows', | ||
27 | 'write:lists', | ||
28 | 'write:media', | ||
29 | 'write:mutes', | ||
30 | 'write:notifications', | ||
31 | 'write:reports', | ||
32 | 'write:statuses', | ||
33 | 'write:bookmarks' | ||
34 | ], | ||
35 | 'follow': [ | ||
36 | 'read:blocks', | ||
37 | 'read:follows', | ||
38 | 'read:mutes', | ||
39 | 'write:blocks', | ||
40 | 'write:follows', | ||
41 | 'write:mutes', | ||
42 | ], | ||
43 | 'admin:read': [ | ||
44 | 'admin:read:accounts', | ||
45 | 'admin:read:reports', | ||
46 | 'admin:read:domain_allows', | ||
47 | 'admin:read:domain_blocks', | ||
48 | 'admin:read:ip_blocks', | ||
49 | 'admin:read:email_domain_blocks', | ||
50 | 'admin:read:canonical_email_blocks', | ||
51 | ], | ||
52 | 'admin:write': [ | ||
53 | 'admin:write:accounts', | ||
54 | 'admin:write:reports', | ||
55 | 'admin:write:domain_allows', | ||
56 | 'admin:write:domain_blocks', | ||
57 | 'admin:write:ip_blocks', | ||
58 | 'admin:write:email_domain_blocks', | ||
59 | 'admin:write:canonical_email_blocks', | ||
60 | ], | ||
61 | } | ||
62 | _VALID_SCOPES = ['read', 'write', 'follow', 'push', 'admin:read', 'admin:write'] + \ | ||
63 | _SCOPE_SETS['read'] + _SCOPE_SETS['write'] + \ | ||
64 | _SCOPE_SETS['admin:read'] + _SCOPE_SETS['admin:write'] \ No newline at end of file | ||
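defaults.py keeps the scope tables importable without the main class: _DEFAULT_SCOPES remains the implicit default for create_app(), log_in() and friends, while the granular names listed in _SCOPE_SETS can be requested directly. A usage sketch with placeholder instance and file names:

    from mastodon import Mastodon

    # Register a deliberately narrow, read-only client instead of the default
    # ['read', 'write', 'follow', 'push'] scope set.
    Mastodon.create_app(
        "my-read-only-client",
        scopes=["read:statuses", "read:notifications"],
        api_base_url="https://mastodon.example",
        to_file="my_read_only_client.secret",
    )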
diff --git a/mastodon/error.py b/mastodon/error.py
new file mode 100644
index 0000000..85cc313
--- /dev/null
+++ b/mastodon/error.py
@@ -0,0 +1,90 @@ | |||
1 | # error.py - error classes | ||
2 | |||
3 | ## | ||
4 | # Exceptions | ||
5 | ## | ||
6 | class MastodonError(Exception): | ||
7 | """Base class for Mastodon.py exceptions""" | ||
8 | |||
9 | |||
10 | class MastodonVersionError(MastodonError): | ||
11 | """Raised when a function is called that the version of Mastodon for which | ||
12 | Mastodon.py was instantiated does not support""" | ||
13 | |||
14 | |||
15 | class MastodonIllegalArgumentError(ValueError, MastodonError): | ||
16 | """Raised when an incorrect parameter is passed to a function""" | ||
17 | pass | ||
18 | |||
19 | |||
20 | class MastodonIOError(IOError, MastodonError): | ||
21 | """Base class for Mastodon.py I/O errors""" | ||
22 | |||
23 | |||
24 | class MastodonFileNotFoundError(MastodonIOError): | ||
25 | """Raised when a file requested to be loaded can not be opened""" | ||
26 | pass | ||
27 | |||
28 | |||
29 | class MastodonNetworkError(MastodonIOError): | ||
30 | """Raised when network communication with the server fails""" | ||
31 | pass | ||
32 | |||
33 | |||
34 | class MastodonReadTimeout(MastodonNetworkError): | ||
35 | """Raised when a stream times out""" | ||
36 | pass | ||
37 | |||
38 | |||
39 | class MastodonAPIError(MastodonError): | ||
40 | """Raised when the mastodon API generates a response that cannot be handled""" | ||
41 | pass | ||
42 | |||
43 | |||
44 | class MastodonServerError(MastodonAPIError): | ||
45 | """Raised if the Server is malconfigured and returns a 5xx error code""" | ||
46 | pass | ||
47 | |||
48 | |||
49 | class MastodonInternalServerError(MastodonServerError): | ||
50 | """Raised if the Server returns a 500 error""" | ||
51 | pass | ||
52 | |||
53 | |||
54 | class MastodonBadGatewayError(MastodonServerError): | ||
55 | """Raised if the Server returns a 502 error""" | ||
56 | pass | ||
57 | |||
58 | |||
59 | class MastodonServiceUnavailableError(MastodonServerError): | ||
60 | """Raised if the Server returns a 503 error""" | ||
61 | pass | ||
62 | |||
63 | |||
64 | class MastodonGatewayTimeoutError(MastodonServerError): | ||
65 | """Raised if the Server returns a 504 error""" | ||
66 | pass | ||
67 | |||
68 | |||
69 | class MastodonNotFoundError(MastodonAPIError): | ||
70 | """Raised when the Mastodon API returns a 404 Not Found error""" | ||
71 | pass | ||
72 | |||
73 | |||
74 | class MastodonUnauthorizedError(MastodonAPIError): | ||
75 | """Raised when the Mastodon API returns a 401 Unauthorized error | ||
76 | |||
77 | This happens when an OAuth token is invalid or has been revoked, | ||
78 | or when trying to access an endpoint that requires authentication | ||
79 | without providing credentials.""" | ||
80 | pass | ||
81 | |||
82 | |||
83 | class MastodonRatelimitError(MastodonError): | ||
84 | """Raised when rate limiting is set to manual mode and the rate limit is exceeded""" | ||
85 | pass | ||
86 | |||
87 | |||
88 | class MastodonMalformedEventError(MastodonError): | ||
89 | """Raised when the server-sent event stream is malformed""" | ||
90 | pass | ||
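Because the server error classes all derive from MastodonAPIError (and ultimately MastodonError), callers can catch errors as narrowly or as broadly as they like. A rough usage sketch, with placeholder instance URL, token file and status ID, and import paths assuming this new module layout:

    from mastodon import Mastodon
    from mastodon.error import MastodonNotFoundError, MastodonUnauthorizedError, MastodonAPIError

    api = Mastodon(access_token="usercred.secret", api_base_url="https://example.social")
    try:
        status = api.status(123456)
    except MastodonNotFoundError:
        print("No status with that ID")
    except MastodonUnauthorizedError:
        print("Token missing, invalid or revoked")
    except MastodonAPIError as error:
        print("Other API error:", error)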
diff --git a/mastodon/internals.py b/mastodon/internals.py new file mode 100644 index 0000000..a19ed77 --- /dev/null +++ b/mastodon/internals.py | |||
@@ -0,0 +1,664 @@ | |||
1 | import datetime | ||
2 | from contextlib import closing | ||
3 | import mimetypes | ||
4 | import threading | ||
5 | import six | ||
6 | import uuid | ||
7 | import pytz | ||
8 | import dateutil.parser | ||
9 | import time | ||
10 | import copy | ||
11 | import requests | ||
12 | import re | ||
13 | import collections | ||
14 | import base64 | ||
15 | import os | ||
16 | |||
17 | from .utility import AttribAccessDict, AttribAccessList | ||
18 | from .error import MastodonNetworkError, MastodonIllegalArgumentError, MastodonRatelimitError, MastodonNotFoundError, \ | ||
19 | MastodonUnauthorizedError, MastodonInternalServerError, MastodonBadGatewayError, MastodonServiceUnavailableError, \ | ||
20 | MastodonGatewayTimeoutError, MastodonServerError, MastodonAPIError, MastodonMalformedEventError | ||
21 | from .compat import urlparse, magic, PurePath | ||
22 | from .defaults import _DEFAULT_STREAM_TIMEOUT, _DEFAULT_STREAM_RECONNECT_WAIT_SEC | ||
23 | |||
24 | ### | ||
25 | # Internal helpers, dragons probably | ||
26 | ### | ||
27 | class Mastodon(): | ||
28 | def __datetime_to_epoch(self, date_time): | ||
29 | """ | ||
30 | Converts a python datetime to unix epoch, accounting for | ||
31 | time zones and such. | ||
32 | |||
33 | Assumes UTC if timezone is not given. | ||
34 | """ | ||
35 | date_time_utc = None | ||
36 | if date_time.tzinfo is None: | ||
37 | date_time_utc = date_time.replace(tzinfo=pytz.utc) | ||
38 | else: | ||
39 | date_time_utc = date_time.astimezone(pytz.utc) | ||
40 | |||
41 | epoch_utc = datetime.datetime.utcfromtimestamp(0).replace(tzinfo=pytz.utc) | ||
42 | |||
43 | return (date_time_utc - epoch_utc).total_seconds() | ||
44 | |||
45 | def __get_logged_in_id(self): | ||
46 | """ | ||
47 | Fetch the logged in user's ID, with caching. ID is reset on calls to log_in. | ||
48 | """ | ||
49 | if self.__logged_in_id is None: | ||
50 | self.__logged_in_id = self.account_verify_credentials().id | ||
51 | return self.__logged_in_id | ||
52 | |||
53 | @staticmethod | ||
54 | def __json_allow_dict_attrs(json_object): | ||
55 | """ | ||
56 | Makes it possible to use attribute notation to access a dict's | ||
57 | elements, while still allowing the dict to act as a dict. | ||
58 | """ | ||
59 | if isinstance(json_object, dict): | ||
60 | return AttribAccessDict(json_object) | ||
61 | return json_object | ||
62 | |||
63 | @staticmethod | ||
64 | def __json_date_parse(json_object): | ||
65 | """ | ||
66 | Parse dates in certain known json fields, if possible. | ||
67 | """ | ||
68 | known_date_fields = ["created_at", "week", "day", "expires_at", "scheduled_at", | ||
69 | "updated_at", "last_status_at", "starts_at", "ends_at", "published_at", "edited_at"] | ||
70 | mark_delete = [] | ||
71 | for k, v in json_object.items(): | ||
72 | if k in known_date_fields: | ||
73 | if v is not None: | ||
74 | try: | ||
75 | if isinstance(v, int): | ||
76 | json_object[k] = datetime.datetime.fromtimestamp(v, pytz.utc) | ||
77 | else: | ||
78 | json_object[k] = dateutil.parser.parse(v) | ||
79 | except: | ||
80 | # When we can't parse a date, we just leave the field out | ||
81 | mark_delete.append(k) | ||
82 | # Two step process because otherwise python gets very upset | ||
83 | for k in mark_delete: | ||
84 | del json_object[k] | ||
85 | return json_object | ||
86 | |||
87 | @staticmethod | ||
88 | def __json_truefalse_parse(json_object): | ||
89 | """ | ||
90 | Parse 'True' / 'False' strings in certain known fields | ||
91 | """ | ||
92 | for key in ('follow', 'favourite', 'reblog', 'mention'): | ||
93 | if (key in json_object and isinstance(json_object[key], six.text_type)): | ||
94 | if json_object[key].lower() == 'true': | ||
95 | json_object[key] = True | ||
96 | if json_object[key].lower() == 'false': | ||
97 | json_object[key] = False | ||
98 | return json_object | ||
99 | |||
100 | @staticmethod | ||
101 | def __json_strnum_to_bignum(json_object): | ||
102 | """ | ||
103 | Converts json string numerals to native python bignums. | ||
104 | """ | ||
105 | for key in ('id', 'week', 'in_reply_to_id', 'in_reply_to_account_id', 'logins', 'registrations', 'statuses', 'day', 'last_read_id'): | ||
106 | if (key in json_object and isinstance(json_object[key], six.text_type)): | ||
107 | try: | ||
108 | json_object[key] = int(json_object[key]) | ||
109 | except ValueError: | ||
110 | pass | ||
111 | |||
112 | return json_object | ||
113 | |||
114 | @staticmethod | ||
115 | def __json_hooks(json_object): | ||
116 | """ | ||
117 | All the json hooks. Used in request parsing. | ||
118 | """ | ||
119 | json_object = Mastodon.__json_strnum_to_bignum(json_object) | ||
120 | json_object = Mastodon.__json_date_parse(json_object) | ||
121 | json_object = Mastodon.__json_truefalse_parse(json_object) | ||
122 | json_object = Mastodon.__json_allow_dict_attrs(json_object) | ||
123 | return json_object | ||
124 | |||
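These hooks are handed to requests' json(object_hook=...) when responses are parsed. A standalone sketch of the date-parsing idea, reduced to two fields and using only the standard json module plus dateutil:

    import json
    import dateutil.parser

    def date_hook(obj):
        # Parse a couple of known date fields while the JSON is decoded;
        # unparseable values are dropped, mirroring the behaviour above.
        for key in ("created_at", "edited_at"):
            if key in obj and obj[key] is not None:
                try:
                    obj[key] = dateutil.parser.parse(obj[key])
                except (ValueError, TypeError):
                    del obj[key]
        return obj

    doc = '{"id": "10", "created_at": "2022-11-27T10:00:00.000Z"}'
    print(json.loads(doc, object_hook=date_hook))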
125 | @staticmethod | ||
126 | def __consistent_isoformat_utc(datetime_val): | ||
127 | """ | ||
128 | Function that does what isoformat does, but consistently produces the | ||
129 | same format on every system, and always represents the time as the | ||
130 | equivalent UTC time. | ||
131 | """ | ||
132 | isotime = datetime_val.astimezone(pytz.utc).strftime("%Y-%m-%dT%H:%M:%S%z") | ||
133 | if isotime[-2] != ":": | ||
134 | isotime = isotime[:-2] + ":" + isotime[-2:] | ||
135 | return isotime | ||
136 | |||
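For illustration, strftime produces a "+0000" style offset and the fix-up above inserts the colon to get "+00:00" (the timestamp and zone below are made up; note that pytz's "Etc/GMT-2" means UTC+02:00):

    import datetime
    import pytz

    local = datetime.datetime(2022, 11, 27, 13, 30, tzinfo=pytz.timezone("Etc/GMT-2"))
    isotime = local.astimezone(pytz.utc).strftime("%Y-%m-%dT%H:%M:%S%z")
    print(isotime)                            # 2022-11-27T11:30:00+0000
    print(isotime[:-2] + ":" + isotime[-2:])  # 2022-11-27T11:30:00+00:00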
137 | def __api_request(self, method, endpoint, params={}, files={}, headers={}, access_token_override=None, base_url_override=None, | ||
138 | do_ratelimiting=True, use_json=False, parse=True, return_response_object=False, skip_error_check=False, lang_override=None): | ||
139 | """ | ||
140 | Internal API request helper. | ||
141 | """ | ||
142 | response = None | ||
143 | remaining_wait = 0 | ||
144 | |||
145 | # Add language to params if not None | ||
146 | lang = self.lang | ||
147 | if lang_override is not None: | ||
148 | lang = lang_override | ||
149 | if lang is not None: | ||
150 | params["lang"] = lang | ||
151 | |||
152 | # "pace" mode ratelimiting: Assume constant rate of requests, sleep a little less long than it | ||
153 | # would take to not hit the rate limit at that request rate. | ||
154 | if do_ratelimiting and self.ratelimit_method == "pace": | ||
155 | if self.ratelimit_remaining == 0: | ||
156 | to_next = self.ratelimit_reset - time.time() | ||
157 | if to_next > 0: | ||
158 | # As a precaution, never sleep longer than 5 minutes | ||
159 | to_next = min(to_next, 5 * 60) | ||
160 | time.sleep(to_next) | ||
161 | else: | ||
162 | time_waited = time.time() - self.ratelimit_lastcall | ||
163 | time_wait = float(self.ratelimit_reset - time.time()) / float(self.ratelimit_remaining) | ||
164 | remaining_wait = time_wait - time_waited | ||
165 | |||
166 | if remaining_wait > 0: | ||
167 | to_next = remaining_wait / self.ratelimit_pacefactor | ||
168 | to_next = min(to_next, 5 * 60) | ||
169 | time.sleep(to_next) | ||
170 | |||
171 | # Generate request headers | ||
172 | headers = copy.deepcopy(headers) | ||
173 | if self.access_token is not None: | ||
174 | headers['Authorization'] = 'Bearer ' + self.access_token | ||
175 | if access_token_override is not None: | ||
176 | headers['Authorization'] = 'Bearer ' + access_token_override | ||
177 | |||
178 | # Add user-agent | ||
179 | if self.user_agent: | ||
180 | headers['User-Agent'] = self.user_agent | ||
181 | |||
182 | # Determine base URL | ||
183 | base_url = self.api_base_url | ||
184 | if base_url_override is not None: | ||
185 | base_url = base_url_override | ||
186 | |||
187 | if self.debug_requests: | ||
188 | print('Mastodon: Request to endpoint "' + base_url + | ||
189 | endpoint + '" using method "' + method + '".') | ||
190 | print('Parameters: ' + str(params)) | ||
191 | print('Headers: ' + str(headers)) | ||
192 | print('Files: ' + str(files)) | ||
193 | |||
194 | # Make request | ||
195 | request_complete = False | ||
196 | while not request_complete: | ||
197 | request_complete = True | ||
198 | |||
199 | response_object = None | ||
200 | try: | ||
201 | kwargs = dict(headers=headers, files=files, timeout=self.request_timeout) | ||
202 | if use_json: | ||
203 | kwargs['json'] = params | ||
204 | elif method == 'GET': | ||
205 | kwargs['params'] = params | ||
206 | else: | ||
207 | kwargs['data'] = params | ||
208 | |||
209 | response_object = self.session.request(method, base_url + endpoint, **kwargs) | ||
210 | except Exception as e: | ||
211 | raise MastodonNetworkError("Could not complete request: %s" % e) | ||
212 | |||
213 | if response_object is None: | ||
214 | raise MastodonIllegalArgumentError("Illegal request.") | ||
215 | |||
216 | # Parse rate limiting headers | ||
217 | if 'X-RateLimit-Remaining' in response_object.headers and do_ratelimiting: | ||
218 | self.ratelimit_remaining = int( | ||
219 | response_object.headers['X-RateLimit-Remaining']) | ||
220 | self.ratelimit_limit = int( | ||
221 | response_object.headers['X-RateLimit-Limit']) | ||
222 | |||
223 | # For GoToSocial, we need an int representation, but calling int() on non-int values would crash | ||
224 | try: | ||
225 | ratelimit_intrep = str( | ||
226 | int(response_object.headers['X-RateLimit-Reset'])) | ||
227 | except: | ||
228 | ratelimit_intrep = None | ||
229 | |||
230 | try: | ||
231 | if ratelimit_intrep is not None and ratelimit_intrep == response_object.headers['X-RateLimit-Reset']: | ||
232 | self.ratelimit_reset = int( | ||
233 | response_object.headers['X-RateLimit-Reset']) | ||
234 | else: | ||
235 | ratelimit_reset_datetime = dateutil.parser.parse(response_object.headers['X-RateLimit-Reset']) | ||
236 | self.ratelimit_reset = self.__datetime_to_epoch(ratelimit_reset_datetime) | ||
237 | |||
238 | # Adjust server time to local clock | ||
239 | if 'Date' in response_object.headers: | ||
240 | server_time_datetime = dateutil.parser.parse(response_object.headers['Date']) | ||
241 | server_time = self.__datetime_to_epoch(server_time_datetime) | ||
242 | server_time_diff = time.time() - server_time | ||
243 | self.ratelimit_reset += server_time_diff | ||
244 | self.ratelimit_lastcall = time.time() | ||
245 | except Exception as e: | ||
246 | raise MastodonRatelimitError("Rate limit time calculations failed: %s" % e) | ||
247 | |||
248 | # Handle response | ||
249 | if self.debug_requests: | ||
250 | print('Mastodon: Response received with code ' + str(response_object.status_code) + '.') | ||
251 | print('response headers: ' + str(response_object.headers)) | ||
252 | print('Response text content: ' + str(response_object.text)) | ||
253 | |||
254 | if not response_object.ok: | ||
255 | try: | ||
256 | response = response_object.json(object_hook=self.__json_hooks) | ||
257 | if isinstance(response, dict) and 'error' in response: | ||
258 | error_msg = response['error'] | ||
259 | elif isinstance(response, str): | ||
260 | error_msg = response | ||
261 | else: | ||
262 | error_msg = None | ||
263 | except ValueError: | ||
264 | error_msg = None | ||
265 | |||
266 | # Handle rate limiting | ||
267 | if response_object.status_code == 429: | ||
268 | if self.ratelimit_method == 'throw' or not do_ratelimiting: | ||
269 | raise MastodonRatelimitError('Hit rate limit.') | ||
270 | elif self.ratelimit_method in ('wait', 'pace'): | ||
271 | to_next = self.ratelimit_reset - time.time() | ||
272 | if to_next > 0: | ||
273 | # As a precaution, never sleep longer than 5 minutes | ||
274 | to_next = min(to_next, 5 * 60) | ||
275 | time.sleep(to_next) | ||
276 | request_complete = False | ||
277 | continue | ||
278 | |||
279 | if not skip_error_check: | ||
280 | if response_object.status_code == 404: | ||
281 | ex_type = MastodonNotFoundError | ||
282 | if not error_msg: | ||
283 | error_msg = 'Endpoint not found.' | ||
284 | # this is for compatibility with older versions | ||
285 | # which raised MastodonAPIError('Endpoint not found.') | ||
286 | # on any 404 | ||
287 | elif response_object.status_code == 401: | ||
288 | ex_type = MastodonUnauthorizedError | ||
289 | elif response_object.status_code == 500: | ||
290 | ex_type = MastodonInternalServerError | ||
291 | elif response_object.status_code == 502: | ||
292 | ex_type = MastodonBadGatewayError | ||
293 | elif response_object.status_code == 503: | ||
294 | ex_type = MastodonServiceUnavailableError | ||
295 | elif response_object.status_code == 504: | ||
296 | ex_type = MastodonGatewayTimeoutError | ||
297 | elif response_object.status_code >= 500 and response_object.status_code <= 511: | ||
298 | ex_type = MastodonServerError | ||
299 | else: | ||
300 | ex_type = MastodonAPIError | ||
301 | |||
302 | raise ex_type('Mastodon API returned error', response_object.status_code, response_object.reason, error_msg) | ||
303 | |||
304 | if return_response_object: | ||
305 | return response_object | ||
306 | |||
307 | if parse: | ||
308 | try: | ||
309 | response = response_object.json(object_hook=self.__json_hooks) | ||
310 | except: | ||
311 | raise MastodonAPIError( | ||
312 | "Could not parse response as JSON, response code was %s, " | ||
313 | "bad json content was '%s'" % (response_object.status_code, | ||
314 | response_object.content)) | ||
315 | else: | ||
316 | response = response_object.content | ||
317 | |||
318 | # Parse link headers | ||
319 | if isinstance(response, list) and \ | ||
320 | 'Link' in response_object.headers and \ | ||
321 | response_object.headers['Link'] != "": | ||
322 | response = AttribAccessList(response) | ||
323 | tmp_urls = requests.utils.parse_header_links( | ||
324 | response_object.headers['Link'].rstrip('>').replace('>,<', ',<')) | ||
325 | for url in tmp_urls: | ||
326 | if 'rel' not in url: | ||
327 | continue | ||
328 | |||
329 | if url['rel'] == 'next': | ||
330 | # Be paranoid and extract max_id specifically | ||
331 | next_url = url['url'] | ||
332 | matchgroups = re.search(r"[?&]max_id=([^&]+)", next_url) | ||
333 | |||
334 | if matchgroups: | ||
335 | next_params = copy.deepcopy(params) | ||
336 | next_params['_pagination_method'] = method | ||
337 | next_params['_pagination_endpoint'] = endpoint | ||
338 | max_id = matchgroups.group(1) | ||
339 | if max_id.isdigit(): | ||
340 | next_params['max_id'] = int(max_id) | ||
341 | else: | ||
342 | next_params['max_id'] = max_id | ||
343 | if "since_id" in next_params: | ||
344 | del next_params['since_id'] | ||
345 | if "min_id" in next_params: | ||
346 | del next_params['min_id'] | ||
347 | response._pagination_next = next_params | ||
348 | |||
349 | # Maybe other API users rely on the pagination info in the last item | ||
350 | # Will be removed in future | ||
351 | if isinstance(response[-1], AttribAccessDict): | ||
352 | response[-1]._pagination_next = next_params | ||
353 | |||
354 | if url['rel'] == 'prev': | ||
355 | # Be paranoid and extract since_id or min_id specifically | ||
356 | prev_url = url['url'] | ||
357 | |||
358 | # Old and busted (pre-2.6.0): since_id pagination | ||
359 | matchgroups = re.search( | ||
360 | r"[?&]since_id=([^&]+)", prev_url) | ||
361 | if matchgroups: | ||
362 | prev_params = copy.deepcopy(params) | ||
363 | prev_params['_pagination_method'] = method | ||
364 | prev_params['_pagination_endpoint'] = endpoint | ||
365 | since_id = matchgroups.group(1) | ||
366 | if since_id.isdigit(): | ||
367 | prev_params['since_id'] = int(since_id) | ||
368 | else: | ||
369 | prev_params['since_id'] = since_id | ||
370 | if "max_id" in prev_params: | ||
371 | del prev_params['max_id'] | ||
372 | response._pagination_prev = prev_params | ||
373 | |||
374 | # Maybe other API users rely on the pagination info in the first item | ||
375 | # Will be removed in future | ||
376 | if isinstance(response[0], AttribAccessDict): | ||
377 | response[0]._pagination_prev = prev_params | ||
378 | |||
379 | # New and fantastico (post-2.6.0): min_id pagination | ||
380 | matchgroups = re.search( | ||
381 | r"[?&]min_id=([^&]+)", prev_url) | ||
382 | if matchgroups: | ||
383 | prev_params = copy.deepcopy(params) | ||
384 | prev_params['_pagination_method'] = method | ||
385 | prev_params['_pagination_endpoint'] = endpoint | ||
386 | min_id = matchgroups.group(1) | ||
387 | if min_id.isdigit(): | ||
388 | prev_params['min_id'] = int(min_id) | ||
389 | else: | ||
390 | prev_params['min_id'] = min_id | ||
391 | if "max_id" in prev_params: | ||
392 | del prev_params['max_id'] | ||
393 | response._pagination_prev = prev_params | ||
394 | |||
395 | # Maybe other API users rely on the pagination info in the first item | ||
396 | # Will be removed in future | ||
397 | if isinstance(response[0], AttribAccessDict): | ||
398 | response[0]._pagination_prev = prev_params | ||
399 | |||
400 | return response | ||
401 | |||
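The _pagination_next / _pagination_prev parameters attached here are what the public fetch_next() and fetch_previous() helpers consume. A rough usage sketch, with placeholder instance URL and token:

    from mastodon import Mastodon

    api = Mastodon(access_token="usercred.secret", api_base_url="https://example.social")
    page = api.timeline_home(limit=20)
    while page is not None:
        for status in page:
            print(status.id, status.account.acct)
        # fetch_next() reads the pagination parameters stored on the list above
        page = api.fetch_next(page)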
402 | def __get_streaming_base(self): | ||
403 | """ | ||
404 | Internal streaming API helper. | ||
405 | |||
406 | Returns the correct URL for the streaming API. | ||
407 | """ | ||
408 | instance = self.instance() | ||
409 | if "streaming_api" in instance["urls"] and instance["urls"]["streaming_api"] != self.api_base_url: | ||
410 | # This is probably a websockets URL, which is really for the browser, but requests can't handle it | ||
411 | # So we do this below to turn it into an HTTPS or HTTP URL | ||
412 | parse = urlparse(instance["urls"]["streaming_api"]) | ||
413 | if parse.scheme == 'wss': | ||
414 | url = "https://" + parse.netloc | ||
415 | elif parse.scheme == 'ws': | ||
416 | url = "http://" + parse.netloc | ||
417 | else: | ||
418 | raise MastodonAPIError( | ||
419 | "Could not parse streaming api location returned from server: {}.".format( | ||
420 | instance["urls"]["streaming_api"])) | ||
421 | else: | ||
422 | url = self.api_base_url | ||
423 | return url | ||
424 | |||
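In short, a wss:// or ws:// streaming URL reported by the instance is rewritten to https:// or http:// so that requests can use it. A small standalone sketch of the same rewrite, with a made-up URL:

    from urllib.parse import urlparse

    streaming_api = "wss://streaming.example.social"  # as reported in instance()["urls"]
    parsed = urlparse(streaming_api)
    scheme = {"wss": "https://", "ws": "http://"}[parsed.scheme]
    print(scheme + parsed.netloc)                     # https://streaming.example.social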
425 | def __stream(self, endpoint, listener, params={}, run_async=False, timeout=_DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=_DEFAULT_STREAM_RECONNECT_WAIT_SEC): | ||
426 | """ | ||
427 | Internal streaming API helper. | ||
428 | |||
429 | Returns a handle to the open connection that the user can close if they | ||
430 | wish to terminate it. | ||
431 | """ | ||
432 | |||
433 | # Check if we have to redirect | ||
434 | url = self.__get_streaming_base() | ||
435 | |||
436 | # The streaming server can't handle two slashes in a path, so remove trailing slashes | ||
437 | if url[-1] == '/': | ||
438 | url = url[:-1] | ||
439 | |||
440 | # Connect function (called and then potentially passed to async handler) | ||
441 | def connect_func(): | ||
442 | headers = {"Authorization": "Bearer " + | ||
443 | self.access_token} if self.access_token else {} | ||
444 | if self.user_agent: | ||
445 | headers['User-Agent'] = self.user_agent | ||
446 | connection = self.session.get(url + endpoint, headers=headers, data=params, stream=True, | ||
447 | timeout=(self.request_timeout, timeout)) | ||
448 | |||
449 | if connection.status_code != 200: | ||
450 | raise MastodonNetworkError( | ||
451 | "Could not connect to streaming server: %s" % connection.reason) | ||
452 | return connection | ||
453 | connection = None | ||
454 | |||
455 | # Async stream handler | ||
456 | class __stream_handle(): | ||
457 | def __init__(self, connection, connect_func, reconnect_async, reconnect_async_wait_sec): | ||
458 | self.closed = False | ||
459 | self.running = True | ||
460 | self.connection = connection | ||
461 | self.connect_func = connect_func | ||
462 | self.reconnect_async = reconnect_async | ||
463 | self.reconnect_async_wait_sec = reconnect_async_wait_sec | ||
464 | self.reconnecting = False | ||
465 | |||
466 | def close(self): | ||
467 | self.closed = True | ||
468 | if self.connection is not None: | ||
469 | self.connection.close() | ||
470 | |||
471 | def is_alive(self): | ||
472 | return self._thread.is_alive() | ||
473 | |||
474 | def is_receiving(self): | ||
475 | if self.closed or not self.running or self.reconnecting or not self.is_alive(): | ||
476 | return False | ||
477 | else: | ||
478 | return True | ||
479 | |||
480 | def _sleep_attentive(self): | ||
481 | if self._thread != threading.current_thread(): | ||
482 | raise RuntimeError( | ||
483 | "Illegal call from outside the stream_handle thread") | ||
484 | time_remaining = self.reconnect_async_wait_sec | ||
485 | while time_remaining > 0 and not self.closed: | ||
486 | time.sleep(0.5) | ||
487 | time_remaining -= 0.5 | ||
488 | |||
489 | def _threadproc(self): | ||
490 | self._thread = threading.current_thread() | ||
491 | |||
492 | # Run until closed or until error if not autoreconnecting | ||
493 | while self.running: | ||
494 | if self.connection is not None: | ||
495 | with closing(self.connection) as r: | ||
496 | try: | ||
497 | listener.handle_stream(r) | ||
498 | except (AttributeError, MastodonMalformedEventError, MastodonNetworkError) as e: | ||
499 | if not (self.closed or self.reconnect_async): | ||
500 | raise e | ||
501 | else: | ||
502 | if self.closed: | ||
503 | self.running = False | ||
504 | |||
505 | # Reconnect loop. Try immediately once, then with delays on error. | ||
506 | if (self.reconnect_async and not self.closed) or self.connection is None: | ||
507 | self.reconnecting = True | ||
508 | connect_success = False | ||
509 | while not connect_success: | ||
510 | if self.closed: | ||
511 | # Someone from outside stopped the streaming | ||
512 | self.running = False | ||
513 | break | ||
514 | try: | ||
515 | the_connection = self.connect_func() | ||
516 | if the_connection.status_code != 200: | ||
517 | exception = MastodonNetworkError(f"Could not connect to server. " | ||
518 | f"HTTP status: {the_connection.status_code}") | ||
519 | listener.on_abort(exception) | ||
520 | self._sleep_attentive() | ||
521 | if self.closed: | ||
522 | # Rare race condition: the stream was stopped from outside right as we | ||
523 | # connected, so close the connection we just established: | ||
524 | the_connection.close() | ||
525 | else: | ||
526 | self.connection = the_connection | ||
527 | connect_success = True | ||
528 | except: | ||
529 | self._sleep_attentive() | ||
530 | connect_success = False | ||
531 | self.reconnecting = False | ||
532 | else: | ||
533 | self.running = False | ||
534 | return 0 | ||
535 | |||
536 | if run_async: | ||
537 | handle = __stream_handle( | ||
538 | connection, connect_func, reconnect_async, reconnect_async_wait_sec) | ||
539 | t = threading.Thread(args=(), target=handle._threadproc) | ||
540 | t.daemon = True | ||
541 | t.start() | ||
542 | return handle | ||
543 | else: | ||
544 | # Blocking, never returns (can only leave via exception) | ||
545 | connection = connect_func() | ||
546 | with closing(connection) as r: | ||
547 | listener.handle_stream(r) | ||
548 | |||
549 | def __generate_params(self, params, exclude=[]): | ||
550 | """ | ||
551 | Internal named-parameters-to-dict helper. | ||
552 | |||
553 | Note for developers: If called with locals() as params, | ||
554 | as is the usual practice in this code, the __generate_params call | ||
555 | (or at least the locals() call) should generally be the first thing | ||
556 | in your function. | ||
557 | """ | ||
558 | params = collections.OrderedDict(params) | ||
559 | |||
560 | if 'self' in params: | ||
561 | del params['self'] | ||
562 | |||
563 | param_keys = list(params.keys()) | ||
564 | for key in param_keys: | ||
565 | if isinstance(params[key], bool): | ||
566 | params[key] = '1' if params[key] else '0' | ||
567 | |||
568 | for key in param_keys: | ||
569 | if params[key] is None or key in exclude: | ||
570 | del params[key] | ||
571 | |||
572 | param_keys = list(params.keys()) | ||
573 | for key in param_keys: | ||
574 | if isinstance(params[key], list): | ||
575 | params[key + "[]"] = params[key] | ||
576 | del params[key] | ||
577 | |||
578 | return params | ||
579 | |||
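A condensed, self-contained sketch of what this produces when fed locals() (build_params below is hypothetical and only mimics the transformation):

    import collections

    def build_params(status, media_ids=None, sensitive=False, spoiler_text=None):
        params = collections.OrderedDict(locals())
        for key in list(params.keys()):
            if isinstance(params[key], bool):
                params[key] = '1' if params[key] else '0'
            if params[key] is None:
                del params[key]
        for key in list(params.keys()):
            if isinstance(params[key], list):
                params[key + "[]"] = params.pop(key)
        return params

    print(build_params("hello", media_ids=[1, 2]))
    # OrderedDict([('status', 'hello'), ('sensitive', '0'), ('media_ids[]', [1, 2])])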
580 | def __unpack_id(self, id, dateconv=False): | ||
581 | """ | ||
582 | Internal object-to-id converter | ||
583 | |||
584 | Checks if id is a dict that contains id and | ||
585 | returns the id inside, otherwise just returns | ||
586 | the id straight. | ||
587 | |||
588 | Also unpacks datetimes to snowflake IDs if requested. | ||
589 | """ | ||
590 | if isinstance(id, dict) and "id" in id: | ||
591 | id = id["id"] | ||
592 | if dateconv and isinstance(id, datetime.datetime): | ||
593 | id = (int(id.timestamp()) << 16) * 1000 | ||
594 | return id | ||
595 | |||
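The date conversion yields a Mastodon snowflake-style ID, which is what allows datetime objects to be passed as min_id / max_id / since_id in timeline calls. A small sketch of the same conversion, with an arbitrary date:

    import datetime
    import pytz

    when = datetime.datetime(2022, 11, 27, 12, 0, tzinfo=pytz.utc)
    # Whole seconds shifted into the snowflake layout (equivalent to a
    # millisecond timestamp shifted left by 16 bits)
    snowflake_id = (int(when.timestamp()) << 16) * 1000
    print(snowflake_id)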
596 | def __decode_webpush_b64(self, data): | ||
597 | """ | ||
598 | Re-pads and decodes urlsafe base64. | ||
599 | """ | ||
600 | missing_padding = len(data) % 4 | ||
601 | if missing_padding != 0: | ||
602 | data += '=' * (4 - missing_padding) | ||
603 | return base64.urlsafe_b64decode(data) | ||
604 | |||
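Web push key material arrives as unpadded urlsafe base64, which Python's decoder rejects, hence the re-padding. For example:

    import base64

    data = "SGVsbG8gd29ybGQ"        # urlsafe base64 with the trailing '=' stripped
    missing_padding = len(data) % 4
    if missing_padding != 0:
        data += '=' * (4 - missing_padding)
    print(base64.urlsafe_b64decode(data))   # b'Hello world'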
605 | def __get_token_expired(self): | ||
606 | """Internal helper for oauth code""" | ||
607 | return self._token_expired < datetime.datetime.now() | ||
608 | |||
609 | def __set_token_expired(self, value): | ||
610 | """Internal helper for oauth code""" | ||
611 | self._token_expired = datetime.datetime.now() + datetime.timedelta(seconds=value) | ||
612 | return | ||
613 | |||
614 | def __get_refresh_token(self): | ||
615 | """Internal helper for oauth code""" | ||
616 | return self._refresh_token | ||
617 | |||
618 | def __set_refresh_token(self, value): | ||
619 | """Internal helper for oauth code""" | ||
620 | self._refresh_token = value | ||
621 | return | ||
622 | |||
623 | def __guess_type(self, media_file): | ||
624 | """Internal helper to guess media file type""" | ||
625 | mime_type = None | ||
626 | try: | ||
627 | mime_type = magic.from_file(media_file, mime=True) | ||
628 | except AttributeError: | ||
629 | mime_type = mimetypes.guess_type(media_file)[0] | ||
630 | return mime_type | ||
631 | |||
632 | def __load_media_file(self, media_file, mime_type=None, file_name=None): | ||
633 | if isinstance(media_file, PurePath): | ||
634 | media_file = str(media_file) | ||
635 | if isinstance(media_file, str) and os.path.isfile(media_file) and mime_type is None: | ||
636 | mime_type = self.__guess_type(media_file) | ||
637 | media_file = open(media_file, 'rb') | ||
638 | elif isinstance(media_file, str) and os.path.isfile(media_file): | ||
639 | media_file = open(media_file, 'rb') | ||
640 | if mime_type is None: | ||
641 | raise MastodonIllegalArgumentError('Could not determine mime type or data passed directly without mime type.') | ||
642 | if file_name is None: | ||
643 | random_suffix = uuid.uuid4().hex | ||
644 | file_name = "mastodonpyupload_" + str(time.time()) + "_" + str(random_suffix) + mimetypes.guess_extension(mime_type) | ||
645 | return (file_name, media_file, mime_type) | ||
646 | |||
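These loaders back the public media handling, so callers can pass a filesystem path, a pathlib path or an open file, and optionally override the mime type. A rough usage sketch, with placeholder file name, instance URL and token:

    from mastodon import Mastodon

    api = Mastodon(access_token="usercred.secret", api_base_url="https://example.social")
    # The mime type is guessed from the file; pass mime_type=... to override it
    media = api.media_post("cat.png", description="A sleepy cat")
    api.status_post("Look at this cat", media_ids=[media])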
647 | @staticmethod | ||
648 | def __protocolize(base_url): | ||
649 | """Internal add-protocol-to-url helper""" | ||
650 | if not base_url.startswith("http://") and not base_url.startswith("https://"): | ||
651 | base_url = "https://" + base_url | ||
652 | |||
653 | # Some API endpoints can't handle extra /'s in path requests | ||
654 | base_url = base_url.rstrip("/") | ||
655 | return base_url | ||
656 | |||
657 | @staticmethod | ||
658 | def __deprotocolize(base_url): | ||
659 | """Internal helper to strip http and https from a URL""" | ||
660 | if base_url.startswith("http://"): | ||
661 | base_url = base_url[7:] | ||
662 | elif base_url.startswith("https://") or base_url.startswith("onion://"): | ||
663 | base_url = base_url[8:] | ||
664 | return base_url | ||
diff --git a/mastodon/utility.py b/mastodon/utility.py new file mode 100644 index 0000000..f393aa8 --- /dev/null +++ b/mastodon/utility.py | |||
@@ -0,0 +1,77 @@ | |||
1 | # utility.py - utility functions, externally usable | ||
2 | |||
3 | import re | ||
4 | from decorator import decorate | ||
5 | from .error import MastodonVersionError | ||
6 | |||
7 | ### | ||
8 | # Version check functions, including decorator and parser | ||
9 | ### | ||
10 | def parse_version_string(version_string): | ||
11 | """Parses a semver version string, stripping off "rc" stuff if present.""" | ||
12 | string_parts = version_string.split(".") | ||
13 | version_parts = ( | ||
14 | int(re.match("([0-9]*)", string_parts[0]).group(0)), | ||
15 | int(re.match("([0-9]*)", string_parts[1]).group(0)), | ||
16 | int(re.match("([0-9]*)", string_parts[2]).group(0)) | ||
17 | ) | ||
18 | return version_parts | ||
19 | |||
20 | def max_version(*version_strings): | ||
21 | """Returns the maximum version of all provided version strings.""" | ||
22 | return max(version_strings, key=parse_version_string) | ||
23 | |||
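For example (the import path assumes the new mastodon.utility module):

    from mastodon.utility import parse_version_string, max_version

    print(parse_version_string("4.0.2"))           # (4, 0, 2)
    print(parse_version_string("3.5.0rc1"))        # (3, 5, 0) - "rc" suffix is ignored
    print(max_version("2.9.3", "3.1.0", "3.0.1"))  # 3.1.0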
24 | def api_version(created_ver, last_changed_ver, return_value_ver): | ||
25 | """Version check decorator. Currently only checks Bigger Than.""" | ||
26 | def api_min_version_decorator(function): | ||
27 | def wrapper(function, self, *args, **kwargs): | ||
28 | if not self.version_check_mode == "none": | ||
29 | if self.version_check_mode == "created": | ||
30 | version = created_ver | ||
31 | else: | ||
32 | version = max_version(last_changed_ver, return_value_ver) | ||
33 | major, minor, patch = parse_version_string(version) | ||
34 | if major > self.mastodon_major: | ||
35 | raise MastodonVersionError("Version check failed (Need version " + version + ")") | ||
36 | elif major == self.mastodon_major and minor > self.mastodon_minor: | ||
37 | raise MastodonVersionError("Version check failed (Need version " + version + ")") | ||
38 | elif major == self.mastodon_major and minor == self.mastodon_minor and patch > self.mastodon_patch: | ||
39 | raise MastodonVersionError("Version check failed (Need version " + version + ", patch is " + str(self.mastodon_patch) + ")") | ||
40 | return function(self, *args, **kwargs) | ||
41 | function.__doc__ = function.__doc__ + "\n\n *Added: Mastodon v" + \ | ||
42 | created_ver + ", last changed: Mastodon v" + last_changed_ver + "*" | ||
43 | return decorate(function, wrapper) | ||
44 | return api_min_version_decorator | ||
45 | |||
46 | ### | ||
47 | # Dict helper class. | ||
48 | # Defined at top level so it can be pickled. | ||
49 | ### | ||
50 | class AttribAccessDict(dict): | ||
51 | def __getattr__(self, attr): | ||
52 | if attr in self: | ||
53 | return self[attr] | ||
54 | else: | ||
55 | raise AttributeError("Attribute not found: " + str(attr)) | ||
56 | |||
57 | def __setattr__(self, attr, val): | ||
58 | if attr in self: | ||
59 | raise AttributeError("Attribute-style access is read only") | ||
60 | super(AttribAccessDict, self).__setattr__(attr, val) | ||
61 | |||
62 | |||
63 | ### | ||
64 | # List helper class. | ||
65 | # Defined at top level so it can be pickled. | ||
66 | ### | ||
67 | class AttribAccessList(list): | ||
68 | def __getattr__(self, attr): | ||
69 | if attr in self: | ||
70 | return self[attr] | ||
71 | else: | ||
72 | raise AttributeError("Attribute not found: " + str(attr)) | ||
73 | |||
74 | def __setattr__(self, attr, val): | ||
75 | if attr in self: | ||
76 | raise AttributeError("Attribute-style access is read only") | ||
77 | super(AttribAccessList, self).__setattr__(attr, val) \ No newline at end of file | ||
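Both helper classes behave like their builtin counterparts while adding read-only attribute access, so parsed API entities can be used either way. For example (import path assumes the new module layout):

    from mastodon.utility import AttribAccessDict

    info = AttribAccessDict({"id": 123, "acct": "user@example.social"})
    print(info.acct)      # attribute-style access
    print(info["id"])     # plain dict access still works
    try:
        info.acct = "other"
    except AttributeError as error:
        print(error)      # existing keys are read-only through attribute access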