-rw-r--r--  mastodon/Mastodon.py    964
-rw-r--r--  mastodon/compat.py       45
-rw-r--r--  mastodon/defaults.py     64
-rw-r--r--  mastodon/error.py        90
-rw-r--r--  mastodon/internals.py   658
-rw-r--r--  mastodon/utility.py      77
6 files changed, 966 insertions, 932 deletions
diff --git a/mastodon/Mastodon.py b/mastodon/Mastodon.py
index 9e30429..ad8e963 100644
--- a/mastodon/Mastodon.py
+++ b/mastodon/Mastodon.py
@@ -4,10 +4,7 @@ import json
4 | import base64 | 4 | import base64 |
5 | import os | 5 | import os |
6 | import os.path | 6 | import os.path |
7 | import mimetypes | ||
8 | import time | 7 | import time |
9 | import random | ||
10 | import string | ||
11 | import datetime | 8 | import datetime |
12 | import collections | 9 | import collections |
13 | from contextlib import closing | 10 | from contextlib import closing |
@@ -17,203 +14,37 @@ import dateutil
17 | import dateutil.parser | 14 | import dateutil.parser |
18 | import re | 15 | import re |
19 | import copy | 16 | import copy |
20 | import threading | ||
21 | import sys | ||
22 | import six | ||
23 | import uuid | ||
24 | from decorator import decorate | ||
25 | import hashlib | ||
26 | |||
27 | IMPL_HAS_CRYPTO = True | ||
28 | try: | ||
29 | import cryptography | ||
30 | from cryptography.hazmat.backends import default_backend | ||
31 | from cryptography.hazmat.primitives.asymmetric import ec | ||
32 | from cryptography.hazmat.primitives import serialization | ||
33 | except: | ||
34 | IMPL_HAS_CRYPTO = False | ||
35 | |||
36 | IMPL_HAS_ECE = True | ||
37 | try: | ||
38 | import http_ece | ||
39 | except: | ||
40 | IMPL_HAS_ECE = False | ||
41 | |||
42 | IMPL_HAS_BLURHASH = True | ||
43 | try: | ||
44 | import blurhash | ||
45 | except: | ||
46 | IMPL_HAS_BLURHASH = False | ||
47 | |||
48 | try: | ||
49 | from urllib.parse import urlparse | ||
50 | except ImportError: | ||
51 | from urlparse import urlparse | ||
52 | |||
53 | try: | ||
54 | import magic | ||
55 | except ImportError: | ||
56 | magic = None | ||
57 | |||
58 | try: | ||
59 | from pathlib import PurePath | ||
60 | except: | ||
61 | class PurePath: | ||
62 | pass | ||
63 | 17 | ||
64 | ### | ||
65 | # Version check functions, including decorator and parser | ||
66 | ### | ||
67 | 18 | ||
19 | from .compat import IMPL_HAS_CRYPTO, IMPL_HAS_ECE, IMPL_HAS_BLURHASH | ||
20 | from .compat import cryptography, default_backend, ec, serialization, http_ece | ||
21 | from .compat import blurhash | ||
22 | from .compat import urlparse | ||
68 | 23 | ||
69 | def parse_version_string(version_string): | 24 | from .utility import parse_version_string, max_version, api_version |
70 | """Parses a semver version string, stripping off "rc" stuff if present.""" | 25 | from .utility import AttribAccessDict, AttribAccessDict |
71 | string_parts = version_string.split(".") | ||
72 | version_parts = ( | ||
73 | int(re.match("([0-9]*)", string_parts[0]).group(0)), | ||
74 | int(re.match("([0-9]*)", string_parts[1]).group(0)), | ||
75 | int(re.match("([0-9]*)", string_parts[2]).group(0)) | ||
76 | ) | ||
77 | return version_parts | ||
78 | |||
79 | def max_version(*version_strings): | ||
80 | """Returns the maximum version of all provided version strings.""" | ||
81 | return max(version_strings, key=parse_version_string) | ||
82 | |||
83 | def api_version(created_ver, last_changed_ver, return_value_ver): | ||
84 | """Version check decorator. Currently only checks Bigger Than.""" | ||
85 | def api_min_version_decorator(function): | ||
86 | def wrapper(function, self, *args, **kwargs): | ||
87 | if not self.version_check_mode == "none": | ||
88 | if self.version_check_mode == "created": | ||
89 | version = created_ver | ||
90 | else: | ||
91 | version = max_version(last_changed_ver, return_value_ver) | ||
92 | major, minor, patch = parse_version_string(version) | ||
93 | if major > self.mastodon_major: | ||
94 | raise MastodonVersionError("Version check failed (Need version " + version + ")") | ||
95 | elif major == self.mastodon_major and minor > self.mastodon_minor: | ||
96 | raise MastodonVersionError("Version check failed (Need version " + version + ")") | ||
97 | elif major == self.mastodon_major and minor == self.mastodon_minor and patch > self.mastodon_patch: | ||
98 | raise MastodonVersionError("Version check failed (Need version " + version + ", patch is " + str(self.mastodon_patch) + ")") | ||
99 | return function(self, *args, **kwargs) | ||
100 | function.__doc__ = function.__doc__ + "\n\n *Added: Mastodon v" + \ | ||
101 | created_ver + ", last changed: Mastodon v" + last_changed_ver + "*" | ||
102 | return decorate(function, wrapper) | ||
103 | return api_min_version_decorator | ||
104 | 26 | ||
105 | ### | 27 | from .error import * |
106 | # Dict helper class. | 28 | from .defaults import _DEFAULT_TIMEOUT, _DEFAULT_SCOPES, _DEFAULT_STREAM_TIMEOUT, _DEFAULT_STREAM_RECONNECT_WAIT_SEC |
107 | # Defined at top level so it can be pickled. | 29 | from .defaults import _SCOPE_SETS |
108 | ### | ||
109 | class AttribAccessDict(dict): | ||
110 | def __getattr__(self, attr): | ||
111 | if attr in self: | ||
112 | return self[attr] | ||
113 | else: | ||
114 | raise AttributeError("Attribute not found: " + str(attr)) | ||
115 | 30 | ||
116 | def __setattr__(self, attr, val): | 31 | from .internals import Mastodon as Internals |
117 | if attr in self: | ||
118 | raise AttributeError("Attribute-style access is read only") | ||
119 | super(AttribAccessDict, self).__setattr__(attr, val) | ||
120 | 32 | ||
121 | 33 | ## | |
122 | ### | ||
123 | # List helper class. | ||
124 | # Defined at top level so it can be pickled. | ||
125 | ### | ||
126 | class AttribAccessList(list): | ||
127 | def __getattr__(self, attr): | ||
128 | if attr in self: | ||
129 | return self[attr] | ||
130 | else: | ||
131 | raise AttributeError("Attribute not found: " + str(attr)) | ||
132 | |||
133 | def __setattr__(self, attr, val): | ||
134 | if attr in self: | ||
135 | raise AttributeError("Attribute-style access is read only") | ||
136 | super(AttribAccessList, self).__setattr__(attr, val) | ||
137 | |||
138 | |||
139 | ### | ||
140 | # The actual Mastodon class | 34 | # The actual Mastodon class |
141 | ### | 35 | ### |
142 | class Mastodon: | 36 | class Mastodon(Internals): |
143 | """ | 37 | """ |
144 | Thorough and easy to use Mastodon | 38 | Thorough and easy to use Mastodon |
145 | API wrapper in Python. | 39 | API wrapper in Python. |
146 | 40 | ||
147 | If anything is unclear, check the official API docs at | 41 | Main class, imports most things from modules |
148 | https://github.com/mastodon/documentation/blob/master/content/en/client/intro.md | ||
149 | """ | 42 | """ |
150 | __DEFAULT_TIMEOUT = 300 | 43 | |
151 | __DEFAULT_STREAM_TIMEOUT = 300 | 44 | # Support level |
152 | __DEFAULT_STREAM_RECONNECT_WAIT_SEC = 5 | 45 | __SUPPORTED_MASTODON_VERSION = "3.5.5" |
153 | __DEFAULT_SCOPES = ['read', 'write', 'follow', 'push'] | ||
154 | __SCOPE_SETS = { | ||
155 | 'read': [ | ||
156 | 'read:accounts', | ||
157 | 'read:blocks', | ||
158 | 'read:favourites', | ||
159 | 'read:filters', | ||
160 | 'read:follows', | ||
161 | 'read:lists', | ||
162 | 'read:mutes', | ||
163 | 'read:notifications', | ||
164 | 'read:search', | ||
165 | 'read:statuses', | ||
166 | 'read:bookmarks' | ||
167 | ], | ||
168 | 'write': [ | ||
169 | 'write:accounts', | ||
170 | 'write:blocks', | ||
171 | 'write:favourites', | ||
172 | 'write:filters', | ||
173 | 'write:follows', | ||
174 | 'write:lists', | ||
175 | 'write:media', | ||
176 | 'write:mutes', | ||
177 | 'write:notifications', | ||
178 | 'write:reports', | ||
179 | 'write:statuses', | ||
180 | 'write:bookmarks' | ||
181 | ], | ||
182 | 'follow': [ | ||
183 | 'read:blocks', | ||
184 | 'read:follows', | ||
185 | 'read:mutes', | ||
186 | 'write:blocks', | ||
187 | 'write:follows', | ||
188 | 'write:mutes', | ||
189 | ], | ||
190 | 'admin:read': [ | ||
191 | 'admin:read:accounts', | ||
192 | 'admin:read:reports', | ||
193 | 'admin:read:domain_allows', | ||
194 | 'admin:read:domain_blocks', | ||
195 | 'admin:read:ip_blocks', | ||
196 | 'admin:read:email_domain_blocks', | ||
197 | 'admin:read:canonical_email_blocks', | ||
198 | ], | ||
199 | 'admin:write': [ | ||
200 | 'admin:write:accounts', | ||
201 | 'admin:write:reports', | ||
202 | 'admin:write:domain_allows', | ||
203 | 'admin:write:domain_blocks', | ||
204 | 'admin:write:ip_blocks', | ||
205 | 'admin:write:email_domain_blocks', | ||
206 | 'admin:write:canonical_email_blocks', | ||
207 | ], | ||
208 | } | ||
209 | __VALID_SCOPES = ['read', 'write', 'follow', 'push', 'admin:read', 'admin:write'] + \ | ||
210 | __SCOPE_SETS['read'] + __SCOPE_SETS['write'] + \ | ||
211 | __SCOPE_SETS['admin:read'] + __SCOPE_SETS['admin:write'] | ||
212 | |||
213 | __SUPPORTED_MASTODON_VERSION = "3.5.4" | ||
214 | 46 | ||
215 | # Dict versions | 47 | # Dict versions |
216 | # Dict versions | ||
217 | __DICT_VERSION_APPLICATION = "2.7.2" | 48 | __DICT_VERSION_APPLICATION = "2.7.2" |
218 | __DICT_VERSION_MENTION = "1.0.0" | 49 | __DICT_VERSION_MENTION = "1.0.0" |
219 | __DICT_VERSION_MEDIA = "3.2.0" | 50 | __DICT_VERSION_MEDIA = "3.2.0" |
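A quick illustration of the version helpers this hunk moves into utility.py (a standalone sketch, not part of the patch; it mirrors the removed parse logic):

    import re

    def parse_version_string(version_string):
        # First three dotted components, with any "rc"/suffix text stripped.
        parts = version_string.split(".")
        return tuple(int(re.match("([0-9]*)", part).group(0)) for part in parts[:3])

    assert parse_version_string("3.5.5rc1") == (3, 5, 5)
    # max_version() simply picks the largest version string under this key:
    assert max(["2.9.3", "3.1.0rc1"], key=parse_version_string) == "3.1.0rc1"

The api_version decorator compares the tuple from parse_version_string against the server version detected at construction time and raises MastodonVersionError when a method is newer than the instance supports.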
@@ -253,8 +84,8 @@ class Mastodon:
253 | # Registering apps | 84 | # Registering apps |
254 | ### | 85 | ### |
255 | @staticmethod | 86 | @staticmethod |
256 | def create_app(client_name, scopes=__DEFAULT_SCOPES, redirect_uris=None, website=None, to_file=None, | 87 | def create_app(client_name, scopes=_DEFAULT_SCOPES, redirect_uris=None, website=None, to_file=None, |
257 | api_base_url=None, request_timeout=__DEFAULT_TIMEOUT, session=None): | 88 | api_base_url=None, request_timeout=_DEFAULT_TIMEOUT, session=None): |
258 | """ | 89 | """ |
259 | Create a new app with given `client_name` and `scopes` (The basic scopes are "read", "write", "follow" and "push" | 90 | Create a new app with given `client_name` and `scopes` (The basic scopes are "read", "write", "follow" and "push" |
260 | - more granular scopes are available, please refer to Mastodon documentation for which) on the instance given | 91 | - more granular scopes are available, please refer to Mastodon documentation for which) on the instance given |
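Typical use of create_app, whose defaults now come from defaults.py (the instance URL and file name below are placeholders):

    from mastodon import Mastodon

    # Register once, persisting the client credentials for later runs.
    Mastodon.create_app(
        "my_client",
        scopes=["read", "write"],
        api_base_url="https://mastodon.example",
        to_file="clientcred.secret",
    )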
@@ -316,7 +147,7 @@ class Mastodon:
316 | # Authentication, including constructor | 147 | # Authentication, including constructor |
317 | ### | 148 | ### |
318 | def __init__(self, client_id=None, client_secret=None, access_token=None, api_base_url=None, debug_requests=False, | 149 | def __init__(self, client_id=None, client_secret=None, access_token=None, api_base_url=None, debug_requests=False, |
319 | ratelimit_method="wait", ratelimit_pacefactor=1.1, request_timeout=__DEFAULT_TIMEOUT, mastodon_version=None, | 150 | ratelimit_method="wait", ratelimit_pacefactor=1.1, request_timeout=_DEFAULT_TIMEOUT, mastodon_version=None, |
320 | version_check_mode="created", session=None, feature_set="mainline", user_agent="mastodonpy", lang=None): | 151 | version_check_mode="created", session=None, feature_set="mainline", user_agent="mastodonpy", lang=None): |
321 | """ | 152 | """ |
322 | Create a new API wrapper instance based on the given `client_secret` and `client_id` on the | 153 | Create a new API wrapper instance based on the given `client_secret` and `client_id` on the |
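A matching constructor call, reusing the credential file written above (file name and URL are placeholders):

    from mastodon import Mastodon

    # client_id may be the path of the file produced by create_app(..., to_file=...).
    api = Mastodon(client_id="clientcred.secret", api_base_url="https://mastodon.example")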
@@ -552,7 +383,7 @@ class Mastodon:
552 | """ | 383 | """ |
553 | return Mastodon.__SUPPORTED_MASTODON_VERSION | 384 | return Mastodon.__SUPPORTED_MASTODON_VERSION |
554 | 385 | ||
555 | def auth_request_url(self, client_id=None, redirect_uris="urn:ietf:wg:oauth:2.0:oob", scopes=__DEFAULT_SCOPES, force_login=False, state=None, lang=None): | 386 | def auth_request_url(self, client_id=None, redirect_uris="urn:ietf:wg:oauth:2.0:oob", scopes=_DEFAULT_SCOPES, force_login=False, state=None, lang=None): |
556 | """ | 387 | """ |
557 | Returns the URL that a client needs to request an OAuth grant from the server. | 388 | Returns the URL that a client needs to request an OAuth grant from the server. |
558 | 389 | ||
@@ -592,7 +423,7 @@ class Mastodon:
592 | formatted_params = urlencode(params) | 423 | formatted_params = urlencode(params) |
593 | return "".join([self.api_base_url, "/oauth/authorize?", formatted_params]) | 424 | return "".join([self.api_base_url, "/oauth/authorize?", formatted_params]) |
594 | 425 | ||
595 | def log_in(self, username=None, password=None, code=None, redirect_uri="urn:ietf:wg:oauth:2.0:oob", refresh_token=None, scopes=__DEFAULT_SCOPES, to_file=None): | 426 | def log_in(self, username=None, password=None, code=None, redirect_uri="urn:ietf:wg:oauth:2.0:oob", refresh_token=None, scopes=_DEFAULT_SCOPES, to_file=None): |
596 | """ | 427 | """ |
597 | Get the access token for a user. | 428 | Get the access token for a user. |
598 | 429 | ||
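Together, auth_request_url and log_in form the usual OAuth authorization-code flow; a sketch continuing from the constructor example above (values are placeholders):

    # 1) Send the user to the grant URL, 2) trade the returned code for a token.
    url = api.auth_request_url(scopes=["read", "write"])
    print("Authorize here:", url)
    code = input("Paste the authorization code: ")
    api.log_in(code=code, scopes=["read", "write"], to_file="usercred.secret")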
@@ -643,9 +474,9 @@ class Mastodon:
643 | raise MastodonIllegalArgumentError('Invalid request: %s' % e) | 474 | raise MastodonIllegalArgumentError('Invalid request: %s' % e) |
644 | 475 | ||
645 | received_scopes = response["scope"].split(" ") | 476 | received_scopes = response["scope"].split(" ") |
646 | for scope_set in self.__SCOPE_SETS.keys(): | 477 | for scope_set in _SCOPE_SETS.keys(): |
647 | if scope_set in received_scopes: | 478 | if scope_set in received_scopes: |
648 | received_scopes += self.__SCOPE_SETS[scope_set] | 479 | received_scopes += _SCOPE_SETS[scope_set] |
649 | 480 | ||
650 | if not set(scopes) <= set(received_scopes): | 481 | if not set(scopes) <= set(received_scopes): |
651 | raise MastodonAPIError('Granted scopes "' + " ".join(received_scopes) + '" do not contain all of the requested scopes "' + " ".join(scopes) + '".') | 482 | raise MastodonAPIError('Granted scopes "' + " ".join(received_scopes) + '" do not contain all of the requested scopes "' + " ".join(scopes) + '".') |
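The loop above expands a granted umbrella scope into its granular sub-scopes before comparing against the request; reduced to its core (scope table abridged):

    _SCOPE_SETS = {"read": ["read:accounts", "read:statuses"]}  # abridged

    requested = ["read:accounts", "read:statuses"]
    received = ["read"]
    for scope_set in _SCOPE_SETS:
        if scope_set in received:
            received += _SCOPE_SETS[scope_set]
    assert set(requested) <= set(received)  # so no MastodonAPIError is raised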
@@ -686,7 +517,7 @@ class Mastodon:
686 | self.__logged_in_id = None | 517 | self.__logged_in_id = None |
687 | 518 | ||
688 | @api_version("2.7.0", "2.7.0", "3.4.0") | 519 | @api_version("2.7.0", "2.7.0", "3.4.0") |
689 | def create_account(self, username, password, email, agreement=False, reason=None, locale="en", scopes=__DEFAULT_SCOPES, to_file=None, return_detailed_error=False): | 520 | def create_account(self, username, password, email, agreement=False, reason=None, locale="en", scopes=_DEFAULT_SCOPES, to_file=None, return_detailed_error=False): |
690 | """ | 521 | """ |
691 | Creates a new user account with the given username, password and email. "agreement" | 522 | Creates a new user account with the given username, password and email. "agreement" |
692 | must be set to true (after showing the user the instance's user agreement and having | 523 | must be set to true (after showing the user the instance's user agreement and having |
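An illustrative call matching the signature above (all values are placeholders, and the target instance must have open registrations):

    api.create_account(
        "new_user",
        "correct horse battery staple",
        "new_user@mail.example",
        agreement=True,        # the user has been shown and accepted the terms
        locale="en",
        to_file="usercred.secret",
    )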
@@ -759,9 +590,9 @@ class Mastodon:
759 | 590 | ||
760 | # Step 3: Check scopes, persist, et cetera | 591 | # Step 3: Check scopes, persist, et cetera |
761 | received_scopes = response["scope"].split(" ") | 592 | received_scopes = response["scope"].split(" ") |
762 | for scope_set in self.__SCOPE_SETS.keys(): | 593 | for scope_set in _SCOPE_SETS.keys(): |
763 | if scope_set in received_scopes: | 594 | if scope_set in received_scopes: |
764 | received_scopes += self.__SCOPE_SETS[scope_set] | 595 | received_scopes += _SCOPE_SETS[scope_set] |
765 | 596 | ||
766 | if not set(scopes) <= set(received_scopes): | 597 | if not set(scopes) <= set(received_scopes): |
767 | raise MastodonAPIError('Granted scopes "' + " ".join(received_scopes) + '" do not contain all of the requested scopes "' + " ".join(scopes) + '".') | 598 | raise MastodonAPIError('Granted scopes "' + " ".join(received_scopes) + '" do not contain all of the requested scopes "' + " ".join(scopes) + '".') |
@@ -3852,7 +3683,7 @@ class Mastodon:
3852 | # Streaming | 3683 | # Streaming |
3853 | ### | 3684 | ### |
3854 | @api_version("1.1.0", "1.4.2", __DICT_VERSION_STATUS) | 3685 | @api_version("1.1.0", "1.4.2", __DICT_VERSION_STATUS) |
3855 | def stream_user(self, listener, run_async=False, timeout=__DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=__DEFAULT_STREAM_RECONNECT_WAIT_SEC): | 3686 | def stream_user(self, listener, run_async=False, timeout=_DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=_DEFAULT_STREAM_RECONNECT_WAIT_SEC): |
3856 | """ | 3687 | """ |
3857 | Streams events that are relevant to the authorized user, i.e. home | 3688 | Streams events that are relevant to the authorized user, i.e. home |
3858 | timeline and notifications. | 3689 | timeline and notifications. |
@@ -3860,21 +3691,21 @@ class Mastodon:
3860 | return self.__stream('/api/v1/streaming/user', listener, run_async=run_async, timeout=timeout, reconnect_async=reconnect_async, reconnect_async_wait_sec=reconnect_async_wait_sec) | 3691 | return self.__stream('/api/v1/streaming/user', listener, run_async=run_async, timeout=timeout, reconnect_async=reconnect_async, reconnect_async_wait_sec=reconnect_async_wait_sec) |
3861 | 3692 | ||
3862 | @api_version("1.1.0", "1.4.2", __DICT_VERSION_STATUS) | 3693 | @api_version("1.1.0", "1.4.2", __DICT_VERSION_STATUS) |
3863 | def stream_public(self, listener, run_async=False, timeout=__DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=__DEFAULT_STREAM_RECONNECT_WAIT_SEC): | 3694 | def stream_public(self, listener, run_async=False, timeout=_DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=_DEFAULT_STREAM_RECONNECT_WAIT_SEC): |
3864 | """ | 3695 | """ |
3865 | Streams public events. | 3696 | Streams public events. |
3866 | """ | 3697 | """ |
3867 | return self.__stream('/api/v1/streaming/public', listener, run_async=run_async, timeout=timeout, reconnect_async=reconnect_async, reconnect_async_wait_sec=reconnect_async_wait_sec) | 3698 | return self.__stream('/api/v1/streaming/public', listener, run_async=run_async, timeout=timeout, reconnect_async=reconnect_async, reconnect_async_wait_sec=reconnect_async_wait_sec) |
3868 | 3699 | ||
3869 | @api_version("1.1.0", "1.4.2", __DICT_VERSION_STATUS) | 3700 | @api_version("1.1.0", "1.4.2", __DICT_VERSION_STATUS) |
3870 | def stream_local(self, listener, run_async=False, timeout=__DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=__DEFAULT_STREAM_RECONNECT_WAIT_SEC): | 3701 | def stream_local(self, listener, run_async=False, timeout=_DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=_DEFAULT_STREAM_RECONNECT_WAIT_SEC): |
3871 | """ | 3702 | """ |
3872 | Streams local public events. | 3703 | Streams local public events. |
3873 | """ | 3704 | """ |
3874 | return self.__stream('/api/v1/streaming/public/local', listener, run_async=run_async, timeout=timeout, reconnect_async=reconnect_async, reconnect_async_wait_sec=reconnect_async_wait_sec) | 3705 | return self.__stream('/api/v1/streaming/public/local', listener, run_async=run_async, timeout=timeout, reconnect_async=reconnect_async, reconnect_async_wait_sec=reconnect_async_wait_sec) |
3875 | 3706 | ||
3876 | @api_version("1.1.0", "1.4.2", __DICT_VERSION_STATUS) | 3707 | @api_version("1.1.0", "1.4.2", __DICT_VERSION_STATUS) |
3877 | def stream_hashtag(self, tag, listener, local=False, run_async=False, timeout=__DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=__DEFAULT_STREAM_RECONNECT_WAIT_SEC): | 3708 | def stream_hashtag(self, tag, listener, local=False, run_async=False, timeout=_DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=_DEFAULT_STREAM_RECONNECT_WAIT_SEC): |
3878 | """ | 3709 | """ |
3879 | Stream for all public statuses for the hashtag 'tag' seen by the connected | 3710 | Stream for all public statuses for the hashtag 'tag' seen by the connected |
3880 | instance. | 3711 | instance. |
@@ -3890,7 +3721,7 @@ class Mastodon:
3890 | return self.__stream("{}?tag={}".format(base, tag), listener, run_async=run_async, timeout=timeout, reconnect_async=reconnect_async, reconnect_async_wait_sec=reconnect_async_wait_sec) | 3721 | return self.__stream("{}?tag={}".format(base, tag), listener, run_async=run_async, timeout=timeout, reconnect_async=reconnect_async, reconnect_async_wait_sec=reconnect_async_wait_sec) |
3891 | 3722 | ||
3892 | @api_version("2.1.0", "2.1.0", __DICT_VERSION_STATUS) | 3723 | @api_version("2.1.0", "2.1.0", __DICT_VERSION_STATUS) |
3893 | def stream_list(self, id, listener, run_async=False, timeout=__DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=__DEFAULT_STREAM_RECONNECT_WAIT_SEC): | 3724 | def stream_list(self, id, listener, run_async=False, timeout=_DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=_DEFAULT_STREAM_RECONNECT_WAIT_SEC): |
3894 | """ | 3725 | """ |
3895 | Stream events for the current user, restricted to accounts on the given | 3726 | Stream events for the current user, restricted to accounts on the given |
3896 | list. | 3727 | list. |
@@ -3899,7 +3730,7 @@ class Mastodon:
3899 | return self.__stream("/api/v1/streaming/list?list={}".format(id), listener, run_async=run_async, timeout=timeout, reconnect_async=reconnect_async, reconnect_async_wait_sec=reconnect_async_wait_sec) | 3730 | return self.__stream("/api/v1/streaming/list?list={}".format(id), listener, run_async=run_async, timeout=timeout, reconnect_async=reconnect_async, reconnect_async_wait_sec=reconnect_async_wait_sec) |
3900 | 3731 | ||
3901 | @api_version("2.6.0", "2.6.0", __DICT_VERSION_STATUS) | 3732 | @api_version("2.6.0", "2.6.0", __DICT_VERSION_STATUS) |
3902 | def stream_direct(self, listener, run_async=False, timeout=__DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=__DEFAULT_STREAM_RECONNECT_WAIT_SEC): | 3733 | def stream_direct(self, listener, run_async=False, timeout=_DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=_DEFAULT_STREAM_RECONNECT_WAIT_SEC): |
3903 | """ | 3734 | """ |
3904 | Streams direct message events for the logged-in user, as conversation events. | 3735 | Streams direct message events for the logged-in user, as conversation events. |
3905 | """ | 3736 | """ |
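All of the stream_* methods above take a listener object; a small sketch using the library's StreamListener base class (the api instance and callback body are illustrative):

    from mastodon import StreamListener

    class PrintListener(StreamListener):
        def on_update(self, status):
            print(status.account.acct, status.url)

    # run_async=True returns a handle; close() terminates the connection.
    handle = api.stream_user(PrintListener(), run_async=True, reconnect_async=True)
    # ... do other work ...
    handle.close()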
@@ -3914,734 +3745,3 @@ class Mastodon:
3914 | if api_okay in [b'OK', b'success']: | 3745 | if api_okay in [b'OK', b'success']: |
3915 | return True | 3746 | return True |
3916 | return False | 3747 | return False |
3917 | |||
3918 | ### | ||
3919 | # Internal helpers, dragons probably | ||
3920 | ### | ||
3921 | def __datetime_to_epoch(self, date_time): | ||
3922 | """ | ||
3923 | Converts a python datetime to unix epoch, accounting for | ||
3924 | time zones and such. | ||
3925 | |||
3926 | Assumes UTC if timezone is not given. | ||
3927 | """ | ||
3928 | if date_time.tzinfo is None: | ||
3929 | date_time = date_time.replace(tzinfo=datetime.timezone.utc) | ||
3930 | return date_time.timestamp() | ||
3931 | |||
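Equivalently, for a naive datetime the helper assumes UTC before taking the POSIX timestamp:

    import datetime

    dt = datetime.datetime(2022, 11, 27, 12, 0, 0)            # naive, treated as UTC
    dt = dt.replace(tzinfo=datetime.timezone.utc)
    assert dt.timestamp() == 1669550400.0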
3932 | def __get_logged_in_id(self): | ||
3933 | """ | ||
3934 | Fetch the logged in user's ID, with caching. ID is reset on calls to log_in. | ||
3935 | """ | ||
3936 | if self.__logged_in_id is None: | ||
3937 | self.__logged_in_id = self.account_verify_credentials().id | ||
3938 | return self.__logged_in_id | ||
3939 | |||
3940 | @staticmethod | ||
3941 | def __json_allow_dict_attrs(json_object): | ||
3942 | """ | ||
3943 | Makes it possible to use attribute notation to access a dicts | ||
3944 | elements, while still allowing the dict to act as a dict. | ||
3945 | """ | ||
3946 | if isinstance(json_object, dict): | ||
3947 | return AttribAccessDict(json_object) | ||
3948 | return json_object | ||
3949 | |||
3950 | @staticmethod | ||
3951 | def __json_date_parse(json_object): | ||
3952 | """ | ||
3953 | Parse dates in certain known json fields, if possible. | ||
3954 | """ | ||
3955 | known_date_fields = ["created_at", "week", "day", "expires_at", "scheduled_at", | ||
3956 | "updated_at", "last_status_at", "starts_at", "ends_at", "published_at", "edited_at"] | ||
3957 | mark_delete = [] | ||
3958 | for k, v in json_object.items(): | ||
3959 | if k in known_date_fields: | ||
3960 | if v is not None: | ||
3961 | try: | ||
3962 | if isinstance(v, int): | ||
3963 | json_object[k] = datetime.datetime.fromtimestamp(v, datetime.timezone.utc) | ||
3964 | else: | ||
3965 | json_object[k] = dateutil.parser.parse(v) | ||
3966 | except: | ||
3967 | # When we can't parse a date, we just leave the field out | ||
3968 | mark_delete.append(k) | ||
3969 | # Two step process because otherwise python gets very upset | ||
3970 | for k in mark_delete: | ||
3971 | del json_object[k] | ||
3972 | return json_object | ||
3973 | |||
3974 | @staticmethod | ||
3975 | def __json_truefalse_parse(json_object): | ||
3976 | """ | ||
3977 | Parse 'True' / 'False' strings in certain known fields | ||
3978 | """ | ||
3979 | for key in ('follow', 'favourite', 'reblog', 'mention'): | ||
3980 | if (key in json_object and isinstance(json_object[key], six.text_type)): | ||
3981 | if json_object[key].lower() == 'true': | ||
3982 | json_object[key] = True | ||
3983 | if json_object[key].lower() == 'false': | ||
3984 | json_object[key] = False | ||
3985 | return json_object | ||
3986 | |||
3987 | @staticmethod | ||
3988 | def __json_strnum_to_bignum(json_object): | ||
3989 | """ | ||
3990 | Converts json string numerals to native python bignums. | ||
3991 | """ | ||
3992 | for key in ('id', 'week', 'in_reply_to_id', 'in_reply_to_account_id', 'logins', 'registrations', 'statuses', 'day', 'last_read_id'): | ||
3993 | if (key in json_object and isinstance(json_object[key], six.text_type)): | ||
3994 | try: | ||
3995 | json_object[key] = int(json_object[key]) | ||
3996 | except ValueError: | ||
3997 | pass | ||
3998 | |||
3999 | return json_object | ||
4000 | |||
4001 | @staticmethod | ||
4002 | def __json_hooks(json_object): | ||
4003 | """ | ||
4004 | All the json hooks. Used in request parsing. | ||
4005 | """ | ||
4006 | json_object = Mastodon.__json_strnum_to_bignum(json_object) | ||
4007 | json_object = Mastodon.__json_date_parse(json_object) | ||
4008 | json_object = Mastodon.__json_truefalse_parse(json_object) | ||
4009 | json_object = Mastodon.__json_allow_dict_attrs(json_object) | ||
4010 | return json_object | ||
4011 | |||
4012 | @staticmethod | ||
4013 | def __consistent_isoformat_utc(datetime_val): | ||
4014 | """ | ||
4015 | Function that does what isoformat does but it actually does the same | ||
4016 | every time instead of randomly doing different things on some systems | ||
4017 | and also it represents that time as the equivalent UTC time. | ||
4018 | """ | ||
4019 | isotime = datetime_val.astimezone(datetime.timezone.utc).strftime("%Y-%m-%dT%H:%M:%S%z") | ||
4020 | if isotime[-2] != ":": | ||
4021 | isotime = isotime[:-2] + ":" + isotime[-2:] | ||
4022 | return isotime | ||
4023 | |||
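Concretely, strftime("%z") yields "+0000" for UTC, and the colon fix-up turns that into a consistent "+00:00" style offset, e.g.:

    import datetime

    dt = datetime.datetime(2022, 11, 27, 12, 0, 0, tzinfo=datetime.timezone.utc)
    isotime = dt.strftime("%Y-%m-%dT%H:%M:%S%z")              # '2022-11-27T12:00:00+0000'
    if isotime[-2] != ":":
        isotime = isotime[:-2] + ":" + isotime[-2:]
    assert isotime == "2022-11-27T12:00:00+00:00"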
4024 | def __api_request(self, method, endpoint, params={}, files={}, headers={}, access_token_override=None, base_url_override=None, | ||
4025 | do_ratelimiting=True, use_json=False, parse=True, return_response_object=False, skip_error_check=False, lang_override=None): | ||
4026 | """ | ||
4027 | Internal API request helper. | ||
4028 | """ | ||
4029 | response = None | ||
4030 | remaining_wait = 0 | ||
4031 | |||
4032 | # Add language to params if not None | ||
4033 | lang = self.lang | ||
4034 | if lang_override is not None: | ||
4035 | lang = lang_override | ||
4036 | if lang is not None: | ||
4037 | params["lang"] = lang | ||
4038 | |||
4039 | # "pace" mode ratelimiting: Assume constant rate of requests, sleep a little less long than it | ||
4040 | # would take to not hit the rate limit at that request rate. | ||
4041 | if do_ratelimiting and self.ratelimit_method == "pace": | ||
4042 | if self.ratelimit_remaining == 0: | ||
4043 | to_next = self.ratelimit_reset - time.time() | ||
4044 | if to_next > 0: | ||
4045 | # As a precaution, never sleep longer than 5 minutes | ||
4046 | to_next = min(to_next, 5 * 60) | ||
4047 | time.sleep(to_next) | ||
4048 | else: | ||
4049 | time_waited = time.time() - self.ratelimit_lastcall | ||
4050 | time_wait = float(self.ratelimit_reset - time.time()) / float(self.ratelimit_remaining) | ||
4051 | remaining_wait = time_wait - time_waited | ||
4052 | |||
4053 | if remaining_wait > 0: | ||
4054 | to_next = remaining_wait / self.ratelimit_pacefactor | ||
4055 | to_next = min(to_next, 5 * 60) | ||
4056 | time.sleep(to_next) | ||
4057 | |||
4058 | # Generate request headers | ||
4059 | headers = copy.deepcopy(headers) | ||
4060 | if self.access_token is not None: | ||
4061 | headers['Authorization'] = 'Bearer ' + self.access_token | ||
4062 | if access_token_override is not None: | ||
4063 | headers['Authorization'] = 'Bearer ' + access_token_override | ||
4064 | |||
4065 | # Add user-agent | ||
4066 | if self.user_agent: | ||
4067 | headers['User-Agent'] = self.user_agent | ||
4068 | |||
4069 | # Determine base URL | ||
4070 | base_url = self.api_base_url | ||
4071 | if base_url_override is not None: | ||
4072 | base_url = base_url_override | ||
4073 | |||
4074 | if self.debug_requests: | ||
4075 | print('Mastodon: Request to endpoint "' + base_url + | ||
4076 | endpoint + '" using method "' + method + '".') | ||
4077 | print('Parameters: ' + str(params)) | ||
4078 | print('Headers: ' + str(headers)) | ||
4079 | print('Files: ' + str(files)) | ||
4080 | |||
4081 | # Make request | ||
4082 | request_complete = False | ||
4083 | while not request_complete: | ||
4084 | request_complete = True | ||
4085 | |||
4086 | response_object = None | ||
4087 | try: | ||
4088 | kwargs = dict(headers=headers, files=files, timeout=self.request_timeout) | ||
4089 | if use_json: | ||
4090 | kwargs['json'] = params | ||
4091 | elif method == 'GET': | ||
4092 | kwargs['params'] = params | ||
4093 | else: | ||
4094 | kwargs['data'] = params | ||
4095 | |||
4096 | response_object = self.session.request(method, base_url + endpoint, **kwargs) | ||
4097 | except Exception as e: | ||
4098 | raise MastodonNetworkError( | ||
4099 | "Could not complete request: %s" % e) | ||
4100 | |||
4101 | if response_object is None: | ||
4102 | raise MastodonIllegalArgumentError("Illegal request.") | ||
4103 | |||
4104 | # Parse rate limiting headers | ||
4105 | if 'X-RateLimit-Remaining' in response_object.headers and do_ratelimiting: | ||
4106 | self.ratelimit_remaining = int( | ||
4107 | response_object.headers['X-RateLimit-Remaining']) | ||
4108 | self.ratelimit_limit = int( | ||
4109 | response_object.headers['X-RateLimit-Limit']) | ||
4110 | |||
4111 | # For gotosocial, we need an int representation, but for non-ints this would crash | ||
4112 | try: | ||
4113 | ratelimit_intrep = str( | ||
4114 | int(response_object.headers['X-RateLimit-Reset'])) | ||
4115 | except: | ||
4116 | ratelimit_intrep = None | ||
4117 | |||
4118 | try: | ||
4119 | if ratelimit_intrep is not None and ratelimit_intrep == response_object.headers['X-RateLimit-Reset']: | ||
4120 | self.ratelimit_reset = int( | ||
4121 | response_object.headers['X-RateLimit-Reset']) | ||
4122 | else: | ||
4123 | ratelimit_reset_datetime = dateutil.parser.parse(response_object.headers['X-RateLimit-Reset']) | ||
4124 | self.ratelimit_reset = self.__datetime_to_epoch(ratelimit_reset_datetime) | ||
4125 | |||
4126 | # Adjust server time to local clock | ||
4127 | if 'Date' in response_object.headers: | ||
4128 | server_time_datetime = dateutil.parser.parse(response_object.headers['Date']) | ||
4129 | server_time = self.__datetime_to_epoch(server_time_datetime) | ||
4130 | server_time_diff = time.time() - server_time | ||
4131 | self.ratelimit_reset += server_time_diff | ||
4132 | self.ratelimit_lastcall = time.time() | ||
4133 | except Exception as e: | ||
4134 | raise MastodonRatelimitError( | ||
4135 | "Rate limit time calculations failed: %s" % e) | ||
4136 | |||
4137 | # Handle response | ||
4138 | if self.debug_requests: | ||
4139 | print('Mastodon: Response received with code ' + str(response_object.status_code) + '.') | ||
4140 | print('response headers: ' + str(response_object.headers)) | ||
4141 | print('Response text content: ' + str(response_object.text)) | ||
4142 | |||
4143 | if not response_object.ok: | ||
4144 | try: | ||
4145 | response = response_object.json(object_hook=self.__json_hooks) | ||
4146 | if isinstance(response, dict) and 'error' in response: | ||
4147 | error_msg = response['error'] | ||
4148 | elif isinstance(response, str): | ||
4149 | error_msg = response | ||
4150 | else: | ||
4151 | error_msg = None | ||
4152 | except ValueError: | ||
4153 | error_msg = None | ||
4154 | |||
4155 | # Handle rate limiting | ||
4156 | if response_object.status_code == 429: | ||
4157 | if self.ratelimit_method == 'throw' or not do_ratelimiting: | ||
4158 | raise MastodonRatelimitError('Hit rate limit.') | ||
4159 | elif self.ratelimit_method in ('wait', 'pace'): | ||
4160 | to_next = self.ratelimit_reset - time.time() | ||
4161 | if to_next > 0: | ||
4162 | # As a precaution, never sleep longer than 5 minutes | ||
4163 | to_next = min(to_next, 5 * 60) | ||
4164 | time.sleep(to_next) | ||
4165 | request_complete = False | ||
4166 | continue | ||
4167 | |||
4168 | if not skip_error_check: | ||
4169 | if response_object.status_code == 404: | ||
4170 | ex_type = MastodonNotFoundError | ||
4171 | if not error_msg: | ||
4172 | error_msg = 'Endpoint not found.' | ||
4173 | # this is for compatibility with older versions | ||
4174 | # which raised MastodonAPIError('Endpoint not found.') | ||
4175 | # on any 404 | ||
4176 | elif response_object.status_code == 401: | ||
4177 | ex_type = MastodonUnauthorizedError | ||
4178 | elif response_object.status_code == 500: | ||
4179 | ex_type = MastodonInternalServerError | ||
4180 | elif response_object.status_code == 502: | ||
4181 | ex_type = MastodonBadGatewayError | ||
4182 | elif response_object.status_code == 503: | ||
4183 | ex_type = MastodonServiceUnavailableError | ||
4184 | elif response_object.status_code == 504: | ||
4185 | ex_type = MastodonGatewayTimeoutError | ||
4186 | elif response_object.status_code >= 500 and \ | ||
4187 | response_object.status_code <= 511: | ||
4188 | ex_type = MastodonServerError | ||
4189 | else: | ||
4190 | ex_type = MastodonAPIError | ||
4191 | |||
4192 | raise ex_type('Mastodon API returned error', response_object.status_code, response_object.reason, error_msg) | ||
4193 | |||
4194 | if return_response_object: | ||
4195 | return response_object | ||
4196 | |||
4197 | if parse: | ||
4198 | try: | ||
4199 | response = response_object.json(object_hook=self.__json_hooks) | ||
4200 | except: | ||
4201 | raise MastodonAPIError( | ||
4202 | "Could not parse response as JSON, response code was %s, " | ||
4203 | "bad json content was '%s'" % (response_object.status_code, | ||
4204 | response_object.content)) | ||
4205 | else: | ||
4206 | response = response_object.content | ||
4207 | |||
4208 | # Parse link headers | ||
4209 | if isinstance(response, list) and \ | ||
4210 | 'Link' in response_object.headers and \ | ||
4211 | response_object.headers['Link'] != "": | ||
4212 | response = AttribAccessList(response) | ||
4213 | tmp_urls = requests.utils.parse_header_links( | ||
4214 | response_object.headers['Link'].rstrip('>').replace('>,<', ',<')) | ||
4215 | for url in tmp_urls: | ||
4216 | if 'rel' not in url: | ||
4217 | continue | ||
4218 | |||
4219 | if url['rel'] == 'next': | ||
4220 | # Be paranoid and extract max_id specifically | ||
4221 | next_url = url['url'] | ||
4222 | matchgroups = re.search( | ||
4223 | r"[?&]max_id=([^&]+)", next_url) | ||
4224 | |||
4225 | if matchgroups: | ||
4226 | next_params = copy.deepcopy(params) | ||
4227 | next_params['_pagination_method'] = method | ||
4228 | next_params['_pagination_endpoint'] = endpoint | ||
4229 | max_id = matchgroups.group(1) | ||
4230 | if max_id.isdigit(): | ||
4231 | next_params['max_id'] = int(max_id) | ||
4232 | else: | ||
4233 | next_params['max_id'] = max_id | ||
4234 | if "since_id" in next_params: | ||
4235 | del next_params['since_id'] | ||
4236 | if "min_id" in next_params: | ||
4237 | del next_params['min_id'] | ||
4238 | response._pagination_next = next_params | ||
4239 | |||
4240 | # Maybe other API users rely on the pagination info in the last item | ||
4241 | # Will be removed in future | ||
4242 | if isinstance(response[-1], AttribAccessDict): | ||
4243 | response[-1]._pagination_next = next_params | ||
4244 | |||
4245 | if url['rel'] == 'prev': | ||
4246 | # Be paranoid and extract since_id or min_id specifically | ||
4247 | prev_url = url['url'] | ||
4248 | |||
4249 | # Old and busted (pre-2.6.0): since_id pagination | ||
4250 | matchgroups = re.search( | ||
4251 | r"[?&]since_id=([^&]+)", prev_url) | ||
4252 | if matchgroups: | ||
4253 | prev_params = copy.deepcopy(params) | ||
4254 | prev_params['_pagination_method'] = method | ||
4255 | prev_params['_pagination_endpoint'] = endpoint | ||
4256 | since_id = matchgroups.group(1) | ||
4257 | if since_id.isdigit(): | ||
4258 | prev_params['since_id'] = int(since_id) | ||
4259 | else: | ||
4260 | prev_params['since_id'] = since_id | ||
4261 | if "max_id" in prev_params: | ||
4262 | del prev_params['max_id'] | ||
4263 | response._pagination_prev = prev_params | ||
4264 | |||
4265 | # Maybe other API users rely on the pagination info in the first item | ||
4266 | # Will be removed in future | ||
4267 | if isinstance(response[0], AttribAccessDict): | ||
4268 | response[0]._pagination_prev = prev_params | ||
4269 | |||
4270 | # New and fantastico (post-2.6.0): min_id pagination | ||
4271 | matchgroups = re.search( | ||
4272 | r"[?&]min_id=([^&]+)", prev_url) | ||
4273 | if matchgroups: | ||
4274 | prev_params = copy.deepcopy(params) | ||
4275 | prev_params['_pagination_method'] = method | ||
4276 | prev_params['_pagination_endpoint'] = endpoint | ||
4277 | min_id = matchgroups.group(1) | ||
4278 | if min_id.isdigit(): | ||
4279 | prev_params['min_id'] = int(min_id) | ||
4280 | else: | ||
4281 | prev_params['min_id'] = min_id | ||
4282 | if "max_id" in prev_params: | ||
4283 | del prev_params['max_id'] | ||
4284 | response._pagination_prev = prev_params | ||
4285 | |||
4286 | # Maybe other API users rely on the pagination info in the first item | ||
4287 | # Will be removed in future | ||
4288 | if isinstance(response[0], AttribAccessDict): | ||
4289 | response[0]._pagination_prev = prev_params | ||
4290 | |||
4291 | return response | ||
4292 | |||
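From calling code, this Link-header bookkeeping surfaces through the library's pagination helpers (fetch_next is not shown in this hunk; api and the page size are illustrative):

    page = api.timeline_home(limit=40)
    while page:
        for status in page:
            print(status.id, status.account.acct)   # attribute access via AttribAccessDict
        page = api.fetch_next(page)                  # follows _pagination_next, None at the end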
4293 | def __get_streaming_base(self): | ||
4294 | """ | ||
4295 | Internal streaming API helper. | ||
4296 | |||
4297 | Returns the correct URL for the streaming API. | ||
4298 | """ | ||
4299 | instance = self.instance() | ||
4300 | if "streaming_api" in instance["urls"] and instance["urls"]["streaming_api"] != self.api_base_url: | ||
4301 | # This is probably a websockets URL, which is really for the browser, but requests can't handle it | ||
4302 | # So we do this below to turn it into an HTTPS or HTTP URL | ||
4303 | parse = urlparse(instance["urls"]["streaming_api"]) | ||
4304 | if parse.scheme == 'wss': | ||
4305 | url = "https://" + parse.netloc | ||
4306 | elif parse.scheme == 'ws': | ||
4307 | url = "http://" + parse.netloc | ||
4308 | else: | ||
4309 | raise MastodonAPIError( | ||
4310 | "Could not parse streaming api location returned from server: {}.".format( | ||
4311 | instance["urls"]["streaming_api"])) | ||
4312 | else: | ||
4313 | url = self.api_base_url | ||
4314 | return url | ||
4315 | |||
4316 | def __stream(self, endpoint, listener, params={}, run_async=False, timeout=__DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=__DEFAULT_STREAM_RECONNECT_WAIT_SEC): | ||
4317 | """ | ||
4318 | Internal streaming API helper. | ||
4319 | |||
4320 | Returns a handle to the open connection that the user can close if they | ||
4321 | wish to terminate it. | ||
4322 | """ | ||
4323 | |||
4324 | # Check if we have to redirect | ||
4325 | url = self.__get_streaming_base() | ||
4326 | |||
4327 | # The streaming server can't handle two slashes in a path, so remove trailing slashes | ||
4328 | if url[-1] == '/': | ||
4329 | url = url[:-1] | ||
4330 | |||
4331 | # Connect function (called and then potentially passed to async handler) | ||
4332 | def connect_func(): | ||
4333 | headers = {"Authorization": "Bearer " + | ||
4334 | self.access_token} if self.access_token else {} | ||
4335 | if self.user_agent: | ||
4336 | headers['User-Agent'] = self.user_agent | ||
4337 | connection = self.session.get(url + endpoint, headers=headers, data=params, stream=True, | ||
4338 | timeout=(self.request_timeout, timeout)) | ||
4339 | |||
4340 | if connection.status_code != 200: | ||
4341 | raise MastodonNetworkError( | ||
4342 | "Could not connect to streaming server: %s" % connection.reason) | ||
4343 | return connection | ||
4344 | connection = None | ||
4345 | |||
4346 | # Async stream handler | ||
4347 | class __stream_handle(): | ||
4348 | def __init__(self, connection, connect_func, reconnect_async, reconnect_async_wait_sec): | ||
4349 | self.closed = False | ||
4350 | self.running = True | ||
4351 | self.connection = connection | ||
4352 | self.connect_func = connect_func | ||
4353 | self.reconnect_async = reconnect_async | ||
4354 | self.reconnect_async_wait_sec = reconnect_async_wait_sec | ||
4355 | self.reconnecting = False | ||
4356 | |||
4357 | def close(self): | ||
4358 | self.closed = True | ||
4359 | if self.connection is not None: | ||
4360 | self.connection.close() | ||
4361 | |||
4362 | def is_alive(self): | ||
4363 | return self._thread.is_alive() | ||
4364 | |||
4365 | def is_receiving(self): | ||
4366 | if self.closed or not self.running or self.reconnecting or not self.is_alive(): | ||
4367 | return False | ||
4368 | else: | ||
4369 | return True | ||
4370 | |||
4371 | def _sleep_attentive(self): | ||
4372 | if self._thread != threading.current_thread(): | ||
4373 | raise RuntimeError( | ||
4374 | "Illegal call from outside the stream_handle thread") | ||
4375 | time_remaining = self.reconnect_async_wait_sec | ||
4376 | while time_remaining > 0 and not self.closed: | ||
4377 | time.sleep(0.5) | ||
4378 | time_remaining -= 0.5 | ||
4379 | |||
4380 | def _threadproc(self): | ||
4381 | self._thread = threading.current_thread() | ||
4382 | |||
4383 | # Run until closed or until error if not autoreconnecting | ||
4384 | while self.running: | ||
4385 | if self.connection is not None: | ||
4386 | with closing(self.connection) as r: | ||
4387 | try: | ||
4388 | listener.handle_stream(r) | ||
4389 | except (AttributeError, MastodonMalformedEventError, MastodonNetworkError) as e: | ||
4390 | if not (self.closed or self.reconnect_async): | ||
4391 | raise e | ||
4392 | else: | ||
4393 | if self.closed: | ||
4394 | self.running = False | ||
4395 | |||
4396 | # Reconnect loop. Try immediately once, then with delays on error. | ||
4397 | if (self.reconnect_async and not self.closed) or self.connection is None: | ||
4398 | self.reconnecting = True | ||
4399 | connect_success = False | ||
4400 | while not connect_success: | ||
4401 | if self.closed: | ||
4402 | # Someone from outside stopped the streaming | ||
4403 | self.running = False | ||
4404 | break | ||
4405 | try: | ||
4406 | the_connection = self.connect_func() | ||
4407 | if the_connection.status_code != 200: | ||
4408 | exception = MastodonNetworkError(f"Could not connect to server. " | ||
4409 | f"HTTP status: {the_connection.status_code}") | ||
4410 | listener.on_abort(exception) | ||
4411 | self._sleep_attentive() | ||
4412 | if self.closed: | ||
4413 | # Here we have maybe a rare race condition. Exactly on connect, someone | ||
4414 | # stopped the streaming before. We close the previous established connection: | ||
4415 | the_connection.close() | ||
4416 | else: | ||
4417 | self.connection = the_connection | ||
4418 | connect_success = True | ||
4419 | except: | ||
4420 | self._sleep_attentive() | ||
4421 | connect_success = False | ||
4422 | self.reconnecting = False | ||
4423 | else: | ||
4424 | self.running = False | ||
4425 | return 0 | ||
4426 | |||
4427 | if run_async: | ||
4428 | handle = __stream_handle( | ||
4429 | connection, connect_func, reconnect_async, reconnect_async_wait_sec) | ||
4430 | t = threading.Thread(args=(), target=handle._threadproc) | ||
4431 | t.daemon = True | ||
4432 | t.start() | ||
4433 | return handle | ||
4434 | else: | ||
4435 | # Blocking, never returns (can only leave via exception) | ||
4436 | connection = connect_func() | ||
4437 | with closing(connection) as r: | ||
4438 | listener.handle_stream(r) | ||
4439 | |||
4440 | def __generate_params(self, params, exclude=[]): | ||
4441 | """ | ||
4442 | Internal named-parameters-to-dict helper. | ||
4443 | |||
4444 | Note for developers: If called with locals() as params, | ||
4445 | as is the usual practice in this code, the __generate_params call | ||
4446 | (or at least the locals() call) should generally be the first thing | ||
4447 | in your function. | ||
4448 | """ | ||
4449 | params = collections.OrderedDict(params) | ||
4450 | |||
4451 | if 'self' in params: | ||
4452 | del params['self'] | ||
4453 | |||
4454 | param_keys = list(params.keys()) | ||
4455 | for key in param_keys: | ||
4456 | if isinstance(params[key], bool): | ||
4457 | params[key] = '1' if params[key] else '0' | ||
4458 | |||
4459 | for key in param_keys: | ||
4460 | if params[key] is None or key in exclude: | ||
4461 | del params[key] | ||
4462 | |||
4463 | param_keys = list(params.keys()) | ||
4464 | for key in param_keys: | ||
4465 | if isinstance(params[key], list): | ||
4466 | params[key + "[]"] = params[key] | ||
4467 | del params[key] | ||
4468 | |||
4469 | return params | ||
4470 | |||
4471 | def __unpack_id(self, id, dateconv=False): | ||
4472 | """ | ||
4473 | Internal object-to-id converter | ||
4474 | |||
4475 | Checks if id is a dict that contains id and | ||
4476 | returns the id inside, otherwise just returns | ||
4477 | the id straight. | ||
4478 | |||
4479 | Also unpacks datetimes to snowflake IDs if requested. | ||
4480 | """ | ||
4481 | if isinstance(id, dict) and "id" in id: | ||
4482 | id = id["id"] | ||
4483 | if dateconv and isinstance(id, datetime.datetime): | ||
4484 | id = (int(id.timestamp()) << 16) * 1000 | ||
4485 | return id | ||
4486 | |||
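The dateconv branch converts a datetime into a Mastodon-style snowflake ID, whose upper bits carry a millisecond timestamp; checking the arithmetic:

    import datetime

    dt = datetime.datetime(2022, 11, 27, 12, 0, 0, tzinfo=datetime.timezone.utc)
    snowflake = (int(dt.timestamp()) << 16) * 1000
    # Shifting the sequence bits back out recovers the timestamp in milliseconds.
    assert (snowflake >> 16) == int(dt.timestamp()) * 1000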
4487 | def __decode_webpush_b64(self, data): | ||
4488 | """ | ||
4489 | Re-pads and decodes urlsafe base64. | ||
4490 | """ | ||
4491 | missing_padding = len(data) % 4 | ||
4492 | if missing_padding != 0: | ||
4493 | data += '=' * (4 - missing_padding) | ||
4494 | return base64.urlsafe_b64decode(data) | ||
4495 | |||
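Web Push keys arrive as unpadded URL-safe base64, so the helper pads the string to a multiple of four before decoding; for example:

    import base64

    for data in ("AQAB", "AQABAA"):          # lengths 4 and 6
        missing = len(data) % 4
        if missing:
            data += "=" * (4 - missing)      # 'AQABAA' becomes 'AQABAA=='
        base64.urlsafe_b64decode(data)       # decodes without padding errors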
4496 | def __get_token_expired(self): | ||
4497 | """Internal helper for oauth code""" | ||
4498 | return self._token_expired < datetime.datetime.now() | ||
4499 | |||
4500 | def __set_token_expired(self, value): | ||
4501 | """Internal helper for oauth code""" | ||
4502 | self._token_expired = datetime.datetime.now() + datetime.timedelta(seconds=value) | ||
4503 | return | ||
4504 | |||
4505 | def __get_refresh_token(self): | ||
4506 | """Internal helper for oauth code""" | ||
4507 | return self._refresh_token | ||
4508 | |||
4509 | def __set_refresh_token(self, value): | ||
4510 | """Internal helper for oauth code""" | ||
4511 | self._refresh_token = value | ||
4512 | return | ||
4513 | |||
4514 | def __guess_type(self, media_file): | ||
4515 | """Internal helper to guess media file type""" | ||
4516 | mime_type = None | ||
4517 | try: | ||
4518 | mime_type = magic.from_file(media_file, mime=True) | ||
4519 | except AttributeError: | ||
4520 | mime_type = mimetypes.guess_type(media_file)[0] | ||
4521 | return mime_type | ||
4522 | |||
4523 | def __load_media_file(self, media_file, mime_type=None, file_name=None): | ||
4524 | if isinstance(media_file, PurePath): | ||
4525 | media_file = str(media_file) | ||
4526 | if isinstance(media_file, str) and os.path.isfile(media_file): | ||
4527 | mime_type = self.__guess_type(media_file) | ||
4528 | media_file = open(media_file, 'rb') | ||
4529 | elif isinstance(media_file, str) and os.path.isfile(media_file): | ||
4530 | media_file = open(media_file, 'rb') | ||
4531 | if mime_type is None: | ||
4532 | raise MastodonIllegalArgumentError( | ||
4533 | 'Could not determine mime type or data passed directly without mime type.') | ||
4534 | if file_name is None: | ||
4535 | random_suffix = uuid.uuid4().hex | ||
4536 | file_name = "mastodonpyupload_" + \ | ||
4537 | str(time.time()) + "_" + str(random_suffix) + \ | ||
4538 | mimetypes.guess_extension(mime_type) | ||
4539 | return (file_name, media_file, mime_type) | ||
4540 | |||
4541 | @staticmethod | ||
4542 | def __protocolize(base_url): | ||
4543 | """Internal add-protocol-to-url helper""" | ||
4544 | if not base_url.startswith("http://") and not base_url.startswith("https://"): | ||
4545 | base_url = "https://" + base_url | ||
4546 | |||
4547 | # Some API endpoints can't handle extra /'s in path requests | ||
4548 | base_url = base_url.rstrip("/") | ||
4549 | return base_url | ||
4550 | |||
4551 | @staticmethod | ||
4552 | def __deprotocolize(base_url): | ||
4553 | """Internal helper to strip http and https from a URL""" | ||
4554 | if base_url.startswith("http://"): | ||
4555 | base_url = base_url[7:] | ||
4556 | elif base_url.startswith("https://") or base_url.startswith("onion://"): | ||
4557 | base_url = base_url[8:] | ||
4558 | return base_url | ||
4559 | |||
4560 | ## | ||
4561 | # Exceptions | ||
4562 | ## | ||
4563 | class MastodonError(Exception): | ||
4564 | """Base class for Mastodon.py exceptions""" | ||
4565 | |||
4566 | |||
4567 | class MastodonVersionError(MastodonError): | ||
4568 | """Raised when a function is called that the version of Mastodon for which | ||
4569 | Mastodon.py was instantiated does not support""" | ||
4570 | |||
4571 | |||
4572 | class MastodonIllegalArgumentError(ValueError, MastodonError): | ||
4573 | """Raised when an incorrect parameter is passed to a function""" | ||
4574 | pass | ||
4575 | |||
4576 | |||
4577 | class MastodonIOError(IOError, MastodonError): | ||
4578 | """Base class for Mastodon.py I/O errors""" | ||
4579 | |||
4580 | |||
4581 | class MastodonFileNotFoundError(MastodonIOError): | ||
4582 | """Raised when a file requested to be loaded can not be opened""" | ||
4583 | pass | ||
4584 | |||
4585 | |||
4586 | class MastodonNetworkError(MastodonIOError): | ||
4587 | """Raised when network communication with the server fails""" | ||
4588 | pass | ||
4589 | |||
4590 | |||
4591 | class MastodonReadTimeout(MastodonNetworkError): | ||
4592 | """Raised when a stream times out""" | ||
4593 | pass | ||
4594 | |||
4595 | |||
4596 | class MastodonAPIError(MastodonError): | ||
4597 | """Raised when the mastodon API generates a response that cannot be handled""" | ||
4598 | pass | ||
4599 | |||
4600 | |||
4601 | class MastodonServerError(MastodonAPIError): | ||
4602 | """Raised if the Server is malconfigured and returns a 5xx error code""" | ||
4603 | pass | ||
4604 | |||
4605 | |||
4606 | class MastodonInternalServerError(MastodonServerError): | ||
4607 | """Raised if the Server returns a 500 error""" | ||
4608 | pass | ||
4609 | |||
4610 | |||
4611 | class MastodonBadGatewayError(MastodonServerError): | ||
4612 | """Raised if the Server returns a 502 error""" | ||
4613 | pass | ||
4614 | |||
4615 | |||
4616 | class MastodonServiceUnavailableError(MastodonServerError): | ||
4617 | """Raised if the Server returns a 503 error""" | ||
4618 | pass | ||
4619 | |||
4620 | |||
4621 | class MastodonGatewayTimeoutError(MastodonServerError): | ||
4622 | """Raised if the Server returns a 504 error""" | ||
4623 | pass | ||
4624 | |||
4625 | |||
4626 | class MastodonNotFoundError(MastodonAPIError): | ||
4627 | """Raised when the Mastodon API returns a 404 Not Found error""" | ||
4628 | pass | ||
4629 | |||
4630 | |||
4631 | class MastodonUnauthorizedError(MastodonAPIError): | ||
4632 | """Raised when the Mastodon API returns a 401 Unauthorized error | ||
4633 | |||
4634 | This happens when an OAuth token is invalid or has been revoked, | ||
4635 | or when trying to access an endpoint that can't be used without | ||
4636 | authentication without providing credentials.""" | ||
4637 | pass | ||
4638 | |||
4639 | |||
4640 | class MastodonRatelimitError(MastodonError): | ||
4641 | """Raised when rate limiting is set to manual mode and the rate limit is exceeded""" | ||
4642 | pass | ||
4643 | |||
4644 | |||
4645 | class MastodonMalformedEventError(MastodonError): | ||
4646 | """Raised when the server-sent event stream is malformed""" | ||
4647 | pass | ||
diff --git a/mastodon/compat.py b/mastodon/compat.py
new file mode 100644
index 0000000..905bfa7
--- /dev/null
+++ b/mastodon/compat.py
@@ -0,0 +1,45 @@
1 | # compat.py - backwards compatible optional imports | ||
2 | |||
3 | IMPL_HAS_CRYPTO = True | ||
4 | try: | ||
5 | import cryptography | ||
6 | from cryptography.hazmat.backends import default_backend | ||
7 | from cryptography.hazmat.primitives.asymmetric import ec | ||
8 | from cryptography.hazmat.primitives import serialization | ||
9 | except: | ||
10 | IMPL_HAS_CRYPTO = False | ||
11 | cryptography = None | ||
12 | default_backend = None | ||
13 | ec = None | ||
14 | serialization = None | ||
15 | |||
16 | IMPL_HAS_ECE = True | ||
17 | try: | ||
18 | import http_ece | ||
19 | except: | ||
20 | IMPL_HAS_ECE = False | ||
21 | http_ece = None | ||
22 | |||
23 | IMPL_HAS_BLURHASH = True | ||
24 | try: | ||
25 | import blurhash | ||
26 | except: | ||
27 | IMPL_HAS_BLURHASH = False | ||
28 | blurhash = None | ||
29 | |||
30 | try: | ||
31 | from urllib.parse import urlparse | ||
32 | except ImportError: | ||
33 | from urlparse import urlparse | ||
34 | |||
35 | try: | ||
36 | import magic | ||
37 | except ImportError: | ||
38 | magic = None | ||
39 | |||
40 | try: | ||
41 | from pathlib import PurePath | ||
42 | except: | ||
43 | class PurePath: | ||
44 | pass | ||
45 | |||
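Callers are expected to consult the IMPL_HAS_* flags before touching the optional dependencies; a sketch of such a guard (the function name and message are illustrative, the module path is the one introduced by this patch):

    from mastodon.compat import IMPL_HAS_CRYPTO, IMPL_HAS_ECE

    def require_push_crypto():
        # Web Push payload handling needs both optional packages.
        if not (IMPL_HAS_CRYPTO and IMPL_HAS_ECE):
            raise RuntimeError("Install 'cryptography' and 'http_ece' to use push support")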
diff --git a/mastodon/defaults.py b/mastodon/defaults.py
new file mode 100644
index 0000000..950ffa2
--- /dev/null
+++ b/mastodon/defaults.py
@@ -0,0 +1,64 @@
1 | # defaults.py - default values for various parameters | ||
2 | |||
3 | _DEFAULT_TIMEOUT = 300 | ||
4 | _DEFAULT_STREAM_TIMEOUT = 300 | ||
5 | _DEFAULT_STREAM_RECONNECT_WAIT_SEC = 5 | ||
6 | _DEFAULT_SCOPES = ['read', 'write', 'follow', 'push'] | ||
7 | _SCOPE_SETS = { | ||
8 | 'read': [ | ||
9 | 'read:accounts', | ||
10 | 'read:blocks', | ||
11 | 'read:favourites', | ||
12 | 'read:filters', | ||
13 | 'read:follows', | ||
14 | 'read:lists', | ||
15 | 'read:mutes', | ||
16 | 'read:notifications', | ||
17 | 'read:search', | ||
18 | 'read:statuses', | ||
19 | 'read:bookmarks' | ||
20 | ], | ||
21 | 'write': [ | ||
22 | 'write:accounts', | ||
23 | 'write:blocks', | ||
24 | 'write:favourites', | ||
25 | 'write:filters', | ||
26 | 'write:follows', | ||
27 | 'write:lists', | ||
28 | 'write:media', | ||
29 | 'write:mutes', | ||
30 | 'write:notifications', | ||
31 | 'write:reports', | ||
32 | 'write:statuses', | ||
33 | 'write:bookmarks' | ||
34 | ], | ||
35 | 'follow': [ | ||
36 | 'read:blocks', | ||
37 | 'read:follows', | ||
38 | 'read:mutes', | ||
39 | 'write:blocks', | ||
40 | 'write:follows', | ||
41 | 'write:mutes', | ||
42 | ], | ||
43 | 'admin:read': [ | ||
44 | 'admin:read:accounts', | ||
45 | 'admin:read:reports', | ||
46 | 'admin:read:domain_allows', | ||
47 | 'admin:read:domain_blocks', | ||
48 | 'admin:read:ip_blocks', | ||
49 | 'admin:read:email_domain_blocks', | ||
50 | 'admin:read:canonical_email_blocks', | ||
51 | ], | ||
52 | 'admin:write': [ | ||
53 | 'admin:write:accounts', | ||
54 | 'admin:write:reports', | ||
55 | 'admin:write:domain_allows', | ||
56 | 'admin:write:domain_blocks', | ||
57 | 'admin:write:ip_blocks', | ||
58 | 'admin:write:email_domain_blocks', | ||
59 | 'admin:write:canonical_email_blocks', | ||
60 | ], | ||
61 | } | ||
62 | _VALID_SCOPES = ['read', 'write', 'follow', 'push', 'admin:read', 'admin:write'] + \ | ||
63 | _SCOPE_SETS['read'] + _SCOPE_SETS['write'] + \ | ||
64 | _SCOPE_SETS['admin:read'] + _SCOPE_SETS['admin:write']
\ No newline at end of file
diff --git a/mastodon/error.py b/mastodon/error.py
new file mode 100644
index 0000000..85cc313
--- /dev/null
+++ b/mastodon/error.py
@@ -0,0 +1,90 @@
1 | # error.py - error classes | ||
2 | |||
3 | ## | ||
4 | # Exceptions | ||
5 | ## | ||
6 | class MastodonError(Exception): | ||
7 | """Base class for Mastodon.py exceptions""" | ||
8 | |||
9 | |||
10 | class MastodonVersionError(MastodonError): | ||
11 | """Raised when a function is called that the version of Mastodon for which | ||
12 | Mastodon.py was instantiated does not support""" | ||
13 | |||
14 | |||
15 | class MastodonIllegalArgumentError(ValueError, MastodonError): | ||
16 | """Raised when an incorrect parameter is passed to a function""" | ||
17 | pass | ||
18 | |||
19 | |||
20 | class MastodonIOError(IOError, MastodonError): | ||
21 | """Base class for Mastodon.py I/O errors""" | ||
22 | |||
23 | |||
24 | class MastodonFileNotFoundError(MastodonIOError): | ||
25 | """Raised when a file requested to be loaded can not be opened""" | ||
26 | pass | ||
27 | |||
28 | |||
29 | class MastodonNetworkError(MastodonIOError): | ||
30 | """Raised when network communication with the server fails""" | ||
31 | pass | ||
32 | |||
33 | |||
34 | class MastodonReadTimeout(MastodonNetworkError): | ||
35 | """Raised when a stream times out""" | ||
36 | pass | ||
37 | |||
38 | |||
39 | class MastodonAPIError(MastodonError): | ||
40 | """Raised when the mastodon API generates a response that cannot be handled""" | ||
41 | pass | ||
42 | |||
43 | |||
44 | class MastodonServerError(MastodonAPIError): | ||
45 | """Raised if the Server is malconfigured and returns a 5xx error code""" | ||
46 | pass | ||
47 | |||
48 | |||
49 | class MastodonInternalServerError(MastodonServerError): | ||
50 | """Raised if the Server returns a 500 error""" | ||
51 | pass | ||
52 | |||
53 | |||
54 | class MastodonBadGatewayError(MastodonServerError): | ||
55 | """Raised if the Server returns a 502 error""" | ||
56 | pass | ||
57 | |||
58 | |||
59 | class MastodonServiceUnavailableError(MastodonServerError): | ||
60 | """Raised if the Server returns a 503 error""" | ||
61 | pass | ||
62 | |||
63 | |||
64 | class MastodonGatewayTimeoutError(MastodonServerError): | ||
65 | """Raised if the Server returns a 504 error""" | ||
66 | pass | ||
67 | |||
68 | |||
69 | class MastodonNotFoundError(MastodonAPIError): | ||
70 | """Raised when the Mastodon API returns a 404 Not Found error""" | ||
71 | pass | ||
72 | |||
73 | |||
74 | class MastodonUnauthorizedError(MastodonAPIError): | ||
75 | """Raised when the Mastodon API returns a 401 Unauthorized error | ||
76 | |||
77 | This happens when an OAuth token is invalid or has been revoked, | ||
78 | or when trying to access an endpoint that can't be used without | ||
79 | authentication without providing credentials.""" | ||
80 | pass | ||
81 | |||
82 | |||
83 | class MastodonRatelimitError(MastodonError): | ||
84 | """Raised when rate limiting is set to manual mode and the rate limit is exceeded""" | ||
85 | pass | ||
86 | |||
87 | |||
88 | class MastodonMalformedEventError(MastodonError): | ||
89 | """Raised when the server-sent event stream is malformed""" | ||
90 | pass | ||
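Since these exceptions form a hierarchy (the 4xx classes derive from MastodonAPIError, the 5xx classes from MastodonServerError, and everything from MastodonError), callers can catch at whichever granularity they need. A small sketch, assuming an already-constructed Mastodon instance named mastodon:

    from mastodon.error import MastodonNotFoundError, MastodonServerError, MastodonError

    try:
        mastodon.status_post("hello fediverse")
    except MastodonNotFoundError:
        print("This server does not know that endpoint")
    except MastodonServerError as e:
        print("Server-side (5xx) problem:", e)
    except MastodonError as e:
        print("Any other Mastodon.py error:", e)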
diff --git a/mastodon/internals.py b/mastodon/internals.py new file mode 100644 index 0000000..415e22d --- /dev/null +++ b/mastodon/internals.py | |||
@@ -0,0 +1,658 @@ | |||
1 | import datetime | ||
2 | from contextlib import closing | ||
3 | import mimetypes | ||
4 | import threading | ||
5 | import six | ||
6 | import uuid | ||
7 | import dateutil.parser | ||
8 | import time | ||
9 | import copy | ||
10 | import requests | ||
11 | import re | ||
12 | import collections | ||
13 | import base64 | ||
14 | import os | ||
15 | |||
16 | from .utility import AttribAccessDict, AttribAccessList | ||
17 | from .error import MastodonNetworkError, MastodonIllegalArgumentError, MastodonRatelimitError, MastodonNotFoundError, \ | ||
18 | MastodonUnauthorizedError, MastodonInternalServerError, MastodonBadGatewayError, MastodonServiceUnavailableError, \ | ||
19 | MastodonGatewayTimeoutError, MastodonServerError, MastodonAPIError, MastodonMalformedEventError | ||
20 | from .compat import urlparse, magic, PurePath | ||
21 | from .defaults import _DEFAULT_STREAM_TIMEOUT, _DEFAULT_STREAM_RECONNECT_WAIT_SEC | ||
22 | |||
23 | ### | ||
24 | # Internal helpers, dragons probably | ||
25 | ### | ||
26 | class Mastodon(): | ||
27 | def __datetime_to_epoch(self, date_time): | ||
28 | """ | ||
29 | Converts a Python datetime to a Unix epoch timestamp, accounting | ||
30 | for time zones. | ||
31 | |||
32 | Assumes UTC if timezone is not given. | ||
33 | """ | ||
34 | if date_time.tzinfo is None: | ||
35 | date_time = date_time.replace(tzinfo=datetime.timezone.utc) | ||
36 | return date_time.timestamp() | ||
37 | |||
38 | |||
39 | def __get_logged_in_id(self): | ||
40 | """ | ||
41 | Fetch the logged in user's ID, with caching. ID is reset on calls to log_in. | ||
42 | """ | ||
43 | if self.__logged_in_id is None: | ||
44 | self.__logged_in_id = self.account_verify_credentials().id | ||
45 | return self.__logged_in_id | ||
46 | |||
47 | @staticmethod | ||
48 | def __json_allow_dict_attrs(json_object): | ||
49 | """ | ||
50 | Makes it possible to use attribute notation to access a dict's | ||
51 | elements, while still allowing the dict to act as a dict. | ||
52 | """ | ||
53 | if isinstance(json_object, dict): | ||
54 | return AttribAccessDict(json_object) | ||
55 | return json_object | ||
56 | |||
57 | @staticmethod | ||
58 | def __json_date_parse(json_object): | ||
59 | """ | ||
60 | Parse dates in certain known json fields, if possible. | ||
61 | """ | ||
62 | known_date_fields = ["created_at", "week", "day", "expires_at", "scheduled_at", | ||
63 | "updated_at", "last_status_at", "starts_at", "ends_at", "published_at", "edited_at"] | ||
64 | mark_delete = [] | ||
65 | for k, v in json_object.items(): | ||
66 | if k in known_date_fields: | ||
67 | if v is not None: | ||
68 | try: | ||
69 | if isinstance(v, int): | ||
70 | json_object[k] = datetime.datetime.fromtimestamp(v, datetime.timezone.utc) | ||
71 | else: | ||
72 | json_object[k] = dateutil.parser.parse(v) | ||
73 | except: | ||
74 | # When we can't parse a date, we just leave the field out | ||
75 | mark_delete.append(k) | ||
76 | # Two step process because otherwise python gets very upset | ||
77 | for k in mark_delete: | ||
78 | del json_object[k] | ||
79 | return json_object | ||
80 | |||
81 | @staticmethod | ||
82 | def __json_truefalse_parse(json_object): | ||
83 | """ | ||
84 | Parse 'True' / 'False' strings in certain known fields | ||
85 | """ | ||
86 | for key in ('follow', 'favourite', 'reblog', 'mention'): | ||
87 | if (key in json_object and isinstance(json_object[key], six.text_type)): | ||
88 | if json_object[key].lower() == 'true': | ||
89 | json_object[key] = True | ||
90 | if json_object[key].lower() == 'false': | ||
91 | json_object[key] = False | ||
92 | return json_object | ||
93 | |||
94 | @staticmethod | ||
95 | def __json_strnum_to_bignum(json_object): | ||
96 | """ | ||
97 | Converts json string numerals to native python bignums. | ||
98 | """ | ||
99 | for key in ('id', 'week', 'in_reply_to_id', 'in_reply_to_account_id', 'logins', 'registrations', 'statuses', 'day', 'last_read_id'): | ||
100 | if (key in json_object and isinstance(json_object[key], six.text_type)): | ||
101 | try: | ||
102 | json_object[key] = int(json_object[key]) | ||
103 | except ValueError: | ||
104 | pass | ||
105 | |||
106 | return json_object | ||
107 | |||
108 | @staticmethod | ||
109 | def __json_hooks(json_object): | ||
110 | """ | ||
111 | All the json hooks. Used in request parsing. | ||
112 | """ | ||
113 | json_object = Mastodon.__json_strnum_to_bignum(json_object) | ||
114 | json_object = Mastodon.__json_date_parse(json_object) | ||
115 | json_object = Mastodon.__json_truefalse_parse(json_object) | ||
116 | json_object = Mastodon.__json_allow_dict_attrs(json_object) | ||
117 | return json_object | ||
118 | |||
119 | @staticmethod | ||
120 | def __consistent_isoformat_utc(datetime_val): | ||
121 | """ | ||
122 | Does what isoformat does, but produces the same output on every | ||
123 | system instead of varying between platforms, and represents the | ||
124 | time as the equivalent UTC time. | ||
125 | """ | ||
126 | isotime = datetime_val.astimezone(datetime.timezone.utc).strftime("%Y-%m-%dT%H:%M:%S%z") | ||
127 | if isotime[-2] != ":": | ||
128 | isotime = isotime[:-2] + ":" + isotime[-2:] | ||
129 | return isotime | ||
130 | |||
131 | def __api_request(self, method, endpoint, params={}, files={}, headers={}, access_token_override=None, base_url_override=None, | ||
132 | do_ratelimiting=True, use_json=False, parse=True, return_response_object=False, skip_error_check=False, lang_override=None): | ||
133 | """ | ||
134 | Internal API request helper. | ||
135 | """ | ||
136 | response = None | ||
137 | remaining_wait = 0 | ||
138 | |||
139 | # Add language to params if not None | ||
140 | lang = self.lang | ||
141 | if lang_override is not None: | ||
142 | lang = lang_override | ||
143 | if lang is not None: | ||
144 | params["lang"] = lang | ||
145 | |||
146 | # "pace" mode ratelimiting: Assume constant rate of requests, sleep a little less long than it | ||
147 | # would take to not hit the rate limit at that request rate. | ||
148 | if do_ratelimiting and self.ratelimit_method == "pace": | ||
149 | if self.ratelimit_remaining == 0: | ||
150 | to_next = self.ratelimit_reset - time.time() | ||
151 | if to_next > 0: | ||
152 | # As a precaution, never sleep longer than 5 minutes | ||
153 | to_next = min(to_next, 5 * 60) | ||
154 | time.sleep(to_next) | ||
155 | else: | ||
156 | time_waited = time.time() - self.ratelimit_lastcall | ||
157 | time_wait = float(self.ratelimit_reset - time.time()) / float(self.ratelimit_remaining) | ||
158 | remaining_wait = time_wait - time_waited | ||
159 | |||
160 | if remaining_wait > 0: | ||
161 | to_next = remaining_wait / self.ratelimit_pacefactor | ||
162 | to_next = min(to_next, 5 * 60) | ||
163 | time.sleep(to_next) | ||
164 | |||
165 | # Generate request headers | ||
166 | headers = copy.deepcopy(headers) | ||
167 | if self.access_token is not None: | ||
168 | headers['Authorization'] = 'Bearer ' + self.access_token | ||
169 | if access_token_override is not None: | ||
170 | headers['Authorization'] = 'Bearer ' + access_token_override | ||
171 | |||
172 | # Add user-agent | ||
173 | if self.user_agent: | ||
174 | headers['User-Agent'] = self.user_agent | ||
175 | |||
176 | # Determine base URL | ||
177 | base_url = self.api_base_url | ||
178 | if base_url_override is not None: | ||
179 | base_url = base_url_override | ||
180 | |||
181 | if self.debug_requests: | ||
182 | print('Mastodon: Request to endpoint "' + base_url + | ||
183 | endpoint + '" using method "' + method + '".') | ||
184 | print('Parameters: ' + str(params)) | ||
185 | print('Headers: ' + str(headers)) | ||
186 | print('Files: ' + str(files)) | ||
187 | |||
188 | # Make request | ||
189 | request_complete = False | ||
190 | while not request_complete: | ||
191 | request_complete = True | ||
192 | |||
193 | response_object = None | ||
194 | try: | ||
195 | kwargs = dict(headers=headers, files=files, timeout=self.request_timeout) | ||
196 | if use_json: | ||
197 | kwargs['json'] = params | ||
198 | elif method == 'GET': | ||
199 | kwargs['params'] = params | ||
200 | else: | ||
201 | kwargs['data'] = params | ||
202 | |||
203 | response_object = self.session.request(method, base_url + endpoint, **kwargs) | ||
204 | except Exception as e: | ||
205 | raise MastodonNetworkError("Could not complete request: %s" % e) | ||
206 | |||
207 | if response_object is None: | ||
208 | raise MastodonIllegalArgumentError("Illegal request.") | ||
209 | |||
210 | # Parse rate limiting headers | ||
211 | if 'X-RateLimit-Remaining' in response_object.headers and do_ratelimiting: | ||
212 | self.ratelimit_remaining = int( | ||
213 | response_object.headers['X-RateLimit-Remaining']) | ||
214 | self.ratelimit_limit = int( | ||
215 | response_object.headers['X-RateLimit-Limit']) | ||
216 | |||
217 | # For GoToSocial we need an int representation, but calling int() on non-int values would crash | ||
218 | try: | ||
219 | ratelimit_intrep = str( | ||
220 | int(response_object.headers['X-RateLimit-Reset'])) | ||
221 | except: | ||
222 | ratelimit_intrep = None | ||
223 | |||
224 | try: | ||
225 | if ratelimit_intrep is not None and ratelimit_intrep == response_object.headers['X-RateLimit-Reset']: | ||
226 | self.ratelimit_reset = int( | ||
227 | response_object.headers['X-RateLimit-Reset']) | ||
228 | else: | ||
229 | ratelimit_reset_datetime = dateutil.parser.parse(response_object.headers['X-RateLimit-Reset']) | ||
230 | self.ratelimit_reset = self.__datetime_to_epoch(ratelimit_reset_datetime) | ||
231 | |||
232 | # Adjust server time to local clock | ||
233 | if 'Date' in response_object.headers: | ||
234 | server_time_datetime = dateutil.parser.parse(response_object.headers['Date']) | ||
235 | server_time = self.__datetime_to_epoch(server_time_datetime) | ||
236 | server_time_diff = time.time() - server_time | ||
237 | self.ratelimit_reset += server_time_diff | ||
238 | self.ratelimit_lastcall = time.time() | ||
239 | except Exception as e: | ||
240 | raise MastodonRatelimitError("Rate limit time calculations failed: %s" % e) | ||
241 | |||
242 | # Handle response | ||
243 | if self.debug_requests: | ||
244 | print('Mastodon: Response received with code ' + str(response_object.status_code) + '.') | ||
245 | print('response headers: ' + str(response_object.headers)) | ||
246 | print('Response text content: ' + str(response_object.text)) | ||
247 | |||
248 | if not response_object.ok: | ||
249 | try: | ||
250 | response = response_object.json(object_hook=self.__json_hooks) | ||
251 | if isinstance(response, dict) and 'error' in response: | ||
252 | error_msg = response['error'] | ||
253 | elif isinstance(response, str): | ||
254 | error_msg = response | ||
255 | else: | ||
256 | error_msg = None | ||
257 | except ValueError: | ||
258 | error_msg = None | ||
259 | |||
260 | # Handle rate limiting | ||
261 | if response_object.status_code == 429: | ||
262 | if self.ratelimit_method == 'throw' or not do_ratelimiting: | ||
263 | raise MastodonRatelimitError('Hit rate limit.') | ||
264 | elif self.ratelimit_method in ('wait', 'pace'): | ||
265 | to_next = self.ratelimit_reset - time.time() | ||
266 | if to_next > 0: | ||
267 | # As a precaution, never sleep longer than 5 minutes | ||
268 | to_next = min(to_next, 5 * 60) | ||
269 | time.sleep(to_next) | ||
270 | request_complete = False | ||
271 | continue | ||
272 | |||
273 | if not skip_error_check: | ||
274 | if response_object.status_code == 404: | ||
275 | ex_type = MastodonNotFoundError | ||
276 | if not error_msg: | ||
277 | error_msg = 'Endpoint not found.' | ||
278 | # this is for compatibility with older versions | ||
279 | # which raised MastodonAPIError('Endpoint not found.') | ||
280 | # on any 404 | ||
281 | elif response_object.status_code == 401: | ||
282 | ex_type = MastodonUnauthorizedError | ||
283 | elif response_object.status_code == 500: | ||
284 | ex_type = MastodonInternalServerError | ||
285 | elif response_object.status_code == 502: | ||
286 | ex_type = MastodonBadGatewayError | ||
287 | elif response_object.status_code == 503: | ||
288 | ex_type = MastodonServiceUnavailableError | ||
289 | elif response_object.status_code == 504: | ||
290 | ex_type = MastodonGatewayTimeoutError | ||
291 | elif response_object.status_code >= 500 and response_object.status_code <= 511: | ||
292 | ex_type = MastodonServerError | ||
293 | else: | ||
294 | ex_type = MastodonAPIError | ||
295 | |||
296 | raise ex_type('Mastodon API returned error', response_object.status_code, response_object.reason, error_msg) | ||
297 | |||
298 | if return_response_object: | ||
299 | return response_object | ||
300 | |||
301 | if parse: | ||
302 | try: | ||
303 | response = response_object.json(object_hook=self.__json_hooks) | ||
304 | except: | ||
305 | raise MastodonAPIError( | ||
306 | "Could not parse response as JSON, response code was %s, " | ||
307 | "bad json content was '%s'" % (response_object.status_code, | ||
308 | response_object.content)) | ||
309 | else: | ||
310 | response = response_object.content | ||
311 | |||
312 | # Parse link headers | ||
313 | if isinstance(response, list) and \ | ||
314 | 'Link' in response_object.headers and \ | ||
315 | response_object.headers['Link'] != "": | ||
316 | response = AttribAccessList(response) | ||
317 | tmp_urls = requests.utils.parse_header_links( | ||
318 | response_object.headers['Link'].rstrip('>').replace('>,<', ',<')) | ||
319 | for url in tmp_urls: | ||
320 | if 'rel' not in url: | ||
321 | continue | ||
322 | |||
323 | if url['rel'] == 'next': | ||
324 | # Be paranoid and extract max_id specifically | ||
325 | next_url = url['url'] | ||
326 | matchgroups = re.search(r"[?&]max_id=([^&]+)", next_url) | ||
327 | |||
328 | if matchgroups: | ||
329 | next_params = copy.deepcopy(params) | ||
330 | next_params['_pagination_method'] = method | ||
331 | next_params['_pagination_endpoint'] = endpoint | ||
332 | max_id = matchgroups.group(1) | ||
333 | if max_id.isdigit(): | ||
334 | next_params['max_id'] = int(max_id) | ||
335 | else: | ||
336 | next_params['max_id'] = max_id | ||
337 | if "since_id" in next_params: | ||
338 | del next_params['since_id'] | ||
339 | if "min_id" in next_params: | ||
340 | del next_params['min_id'] | ||
341 | response._pagination_next = next_params | ||
342 | |||
343 | # Maybe other API users rely on the pagination info in the last item | ||
344 | # Will be removed in future | ||
345 | if isinstance(response[-1], AttribAccessDict): | ||
346 | response[-1]._pagination_next = next_params | ||
347 | |||
348 | if url['rel'] == 'prev': | ||
349 | # Be paranoid and extract since_id or min_id specifically | ||
350 | prev_url = url['url'] | ||
351 | |||
352 | # Old and busted (pre-2.6.0): since_id pagination | ||
353 | matchgroups = re.search( | ||
354 | r"[?&]since_id=([^&]+)", prev_url) | ||
355 | if matchgroups: | ||
356 | prev_params = copy.deepcopy(params) | ||
357 | prev_params['_pagination_method'] = method | ||
358 | prev_params['_pagination_endpoint'] = endpoint | ||
359 | since_id = matchgroups.group(1) | ||
360 | if since_id.isdigit(): | ||
361 | prev_params['since_id'] = int(since_id) | ||
362 | else: | ||
363 | prev_params['since_id'] = since_id | ||
364 | if "max_id" in prev_params: | ||
365 | del prev_params['max_id'] | ||
366 | response._pagination_prev = prev_params | ||
367 | |||
368 | # Maybe other API users rely on the pagination info in the first item | ||
369 | # Will be removed in future | ||
370 | if isinstance(response[0], AttribAccessDict): | ||
371 | response[0]._pagination_prev = prev_params | ||
372 | |||
373 | # New and fantastico (post-2.6.0): min_id pagination | ||
374 | matchgroups = re.search( | ||
375 | r"[?&]min_id=([^&]+)", prev_url) | ||
376 | if matchgroups: | ||
377 | prev_params = copy.deepcopy(params) | ||
378 | prev_params['_pagination_method'] = method | ||
379 | prev_params['_pagination_endpoint'] = endpoint | ||
380 | min_id = matchgroups.group(1) | ||
381 | if min_id.isdigit(): | ||
382 | prev_params['min_id'] = int(min_id) | ||
383 | else: | ||
384 | prev_params['min_id'] = min_id | ||
385 | if "max_id" in prev_params: | ||
386 | del prev_params['max_id'] | ||
387 | response._pagination_prev = prev_params | ||
388 | |||
389 | # Maybe other API users rely on the pagination info in the first item | ||
390 | # Will be removed in future | ||
391 | if isinstance(response[0], AttribAccessDict): | ||
392 | response[0]._pagination_prev = prev_params | ||
393 | |||
394 | return response | ||
395 | |||
396 | def __get_streaming_base(self): | ||
397 | """ | ||
398 | Internal streaming API helper. | ||
399 | |||
400 | Returns the correct URL for the streaming API. | ||
401 | """ | ||
402 | instance = self.instance() | ||
403 | if "streaming_api" in instance["urls"] and instance["urls"]["streaming_api"] != self.api_base_url: | ||
404 | # This is probably a websocket URL, which is meant for the browser and which requests can't handle, | ||
405 | # so below we turn it into an HTTPS or HTTP URL | ||
406 | parse = urlparse(instance["urls"]["streaming_api"]) | ||
407 | if parse.scheme == 'wss': | ||
408 | url = "https://" + parse.netloc | ||
409 | elif parse.scheme == 'ws': | ||
410 | url = "http://" + parse.netloc | ||
411 | else: | ||
412 | raise MastodonAPIError( | ||
413 | "Could not parse streaming api location returned from server: {}.".format( | ||
414 | instance["urls"]["streaming_api"])) | ||
415 | else: | ||
416 | url = self.api_base_url | ||
417 | return url | ||
418 | |||
419 | def __stream(self, endpoint, listener, params={}, run_async=False, timeout=_DEFAULT_STREAM_TIMEOUT, reconnect_async=False, reconnect_async_wait_sec=_DEFAULT_STREAM_RECONNECT_WAIT_SEC): | ||
420 | """ | ||
421 | Internal streaming API helper. | ||
422 | |||
423 | Returns a handle to the open connection that the user can close if they | ||
424 | wish to terminate it. | ||
425 | """ | ||
426 | |||
427 | # Check if we have to redirect | ||
428 | url = self.__get_streaming_base() | ||
429 | |||
430 | # The streaming server can't handle two slashes in a path, so remove trailing slashes | ||
431 | if url[-1] == '/': | ||
432 | url = url[:-1] | ||
433 | |||
434 | # Connect function (called and then potentially passed to async handler) | ||
435 | def connect_func(): | ||
436 | headers = {"Authorization": "Bearer " + | ||
437 | self.access_token} if self.access_token else {} | ||
438 | if self.user_agent: | ||
439 | headers['User-Agent'] = self.user_agent | ||
440 | connection = self.session.get(url + endpoint, headers=headers, data=params, stream=True, | ||
441 | timeout=(self.request_timeout, timeout)) | ||
442 | |||
443 | if connection.status_code != 200: | ||
444 | raise MastodonNetworkError( | ||
445 | "Could not connect to streaming server: %s" % connection.reason) | ||
446 | return connection | ||
447 | connection = None | ||
448 | |||
449 | # Async stream handler | ||
450 | class __stream_handle(): | ||
451 | def __init__(self, connection, connect_func, reconnect_async, reconnect_async_wait_sec): | ||
452 | self.closed = False | ||
453 | self.running = True | ||
454 | self.connection = connection | ||
455 | self.connect_func = connect_func | ||
456 | self.reconnect_async = reconnect_async | ||
457 | self.reconnect_async_wait_sec = reconnect_async_wait_sec | ||
458 | self.reconnecting = False | ||
459 | |||
460 | def close(self): | ||
461 | self.closed = True | ||
462 | if self.connection is not None: | ||
463 | self.connection.close() | ||
464 | |||
465 | def is_alive(self): | ||
466 | return self._thread.is_alive() | ||
467 | |||
468 | def is_receiving(self): | ||
469 | if self.closed or not self.running or self.reconnecting or not self.is_alive(): | ||
470 | return False | ||
471 | else: | ||
472 | return True | ||
473 | |||
474 | def _sleep_attentive(self): | ||
475 | if self._thread != threading.current_thread(): | ||
476 | raise RuntimeError( | ||
477 | "Illegal call from outside the stream_handle thread") | ||
478 | time_remaining = self.reconnect_async_wait_sec | ||
479 | while time_remaining > 0 and not self.closed: | ||
480 | time.sleep(0.5) | ||
481 | time_remaining -= 0.5 | ||
482 | |||
483 | def _threadproc(self): | ||
484 | self._thread = threading.current_thread() | ||
485 | |||
486 | # Run until closed or until error if not autoreconnecting | ||
487 | while self.running: | ||
488 | if self.connection is not None: | ||
489 | with closing(self.connection) as r: | ||
490 | try: | ||
491 | listener.handle_stream(r) | ||
492 | except (AttributeError, MastodonMalformedEventError, MastodonNetworkError) as e: | ||
493 | if not (self.closed or self.reconnect_async): | ||
494 | raise e | ||
495 | else: | ||
496 | if self.closed: | ||
497 | self.running = False | ||
498 | |||
499 | # Reconnect loop. Try immediately once, then with delays on error. | ||
500 | if (self.reconnect_async and not self.closed) or self.connection is None: | ||
501 | self.reconnecting = True | ||
502 | connect_success = False | ||
503 | while not connect_success: | ||
504 | if self.closed: | ||
505 | # Someone from outside stopped the streaming | ||
506 | self.running = False | ||
507 | break | ||
508 | try: | ||
509 | the_connection = self.connect_func() | ||
510 | if the_connection.status_code != 200: | ||
511 | exception = MastodonNetworkError(f"Could not connect to server. " | ||
512 | f"HTTP status: {the_connection.status_code}") | ||
513 | listener.on_abort(exception) | ||
514 | self._sleep_attentive() | ||
515 | if self.closed: | ||
516 | # Here we have maybe a rare race condition. Exactly on connect, someone | ||
517 | # stopped the streaming before. We close the previous established connection: | ||
518 | the_connection.close() | ||
519 | else: | ||
520 | self.connection = the_connection | ||
521 | connect_success = True | ||
522 | except: | ||
523 | self._sleep_attentive() | ||
524 | connect_success = False | ||
525 | self.reconnecting = False | ||
526 | else: | ||
527 | self.running = False | ||
528 | return 0 | ||
529 | |||
530 | if run_async: | ||
531 | handle = __stream_handle( | ||
532 | connection, connect_func, reconnect_async, reconnect_async_wait_sec) | ||
533 | t = threading.Thread(args=(), target=handle._threadproc) | ||
534 | t.daemon = True | ||
535 | t.start() | ||
536 | return handle | ||
537 | else: | ||
538 | # Blocking, never returns (can only leave via exception) | ||
539 | connection = connect_func() | ||
540 | with closing(connection) as r: | ||
541 | listener.handle_stream(r) | ||
542 | |||
543 | def __generate_params(self, params, exclude=[]): | ||
544 | """ | ||
545 | Internal named-parameters-to-dict helper. | ||
546 | |||
547 | Note for developers: If called with locals() as params, | ||
548 | as is the usual practice in this code, the __generate_params call | ||
549 | (or at least the locals() call) should generally be the first thing | ||
550 | in your function. | ||
551 | """ | ||
552 | params = collections.OrderedDict(params) | ||
553 | |||
554 | if 'self' in params: | ||
555 | del params['self'] | ||
556 | |||
557 | param_keys = list(params.keys()) | ||
558 | for key in param_keys: | ||
559 | if isinstance(params[key], bool): | ||
560 | params[key] = '1' if params[key] else '0' | ||
561 | |||
562 | for key in param_keys: | ||
563 | if params[key] is None or key in exclude: | ||
564 | del params[key] | ||
565 | |||
566 | param_keys = list(params.keys()) | ||
567 | for key in param_keys: | ||
568 | if isinstance(params[key], list): | ||
569 | params[key + "[]"] = params[key] | ||
570 | del params[key] | ||
571 | |||
572 | return params | ||
573 | |||
574 | def __unpack_id(self, id, dateconv=False): | ||
575 | """ | ||
576 | Internal object-to-id converter | ||
577 | |||
578 | Checks if id is a dict that contains an id and | ||
579 | returns the id inside; otherwise returns | ||
580 | the id unchanged. | ||
581 | |||
582 | Also unpacks datetimes to snowflake IDs if requested. | ||
583 | """ | ||
584 | if isinstance(id, dict) and "id" in id: | ||
585 | id = id["id"] | ||
586 | if dateconv and isinstance(id, datetime.datetime): | ||
587 | id = (int(id.timestamp()) << 16) * 1000 | ||
588 | return id | ||
589 | |||
590 | def __decode_webpush_b64(self, data): | ||
591 | """ | ||
592 | Re-pads and decodes urlsafe base64. | ||
593 | """ | ||
594 | missing_padding = len(data) % 4 | ||
595 | if missing_padding != 0: | ||
596 | data += '=' * (4 - missing_padding) | ||
597 | return base64.urlsafe_b64decode(data) | ||
598 | |||
599 | def __get_token_expired(self): | ||
600 | """Internal helper for oauth code""" | ||
601 | return self._token_expired < datetime.datetime.now() | ||
602 | |||
603 | def __set_token_expired(self, value): | ||
604 | """Internal helper for oauth code""" | ||
605 | self._token_expired = datetime.datetime.now() + datetime.timedelta(seconds=value) | ||
606 | return | ||
607 | |||
608 | def __get_refresh_token(self): | ||
609 | """Internal helper for oauth code""" | ||
610 | return self._refresh_token | ||
611 | |||
612 | def __set_refresh_token(self, value): | ||
613 | """Internal helper for oauth code""" | ||
614 | self._refresh_token = value | ||
615 | return | ||
616 | |||
617 | def __guess_type(self, media_file): | ||
618 | """Internal helper to guess media file type""" | ||
619 | mime_type = None | ||
620 | try: | ||
621 | mime_type = magic.from_file(media_file, mime=True) | ||
622 | except AttributeError: | ||
623 | mime_type = mimetypes.guess_type(media_file)[0] | ||
624 | return mime_type | ||
625 | |||
626 | def __load_media_file(self, media_file, mime_type=None, file_name=None): | ||
627 | if isinstance(media_file, PurePath): | ||
628 | media_file = str(media_file) | ||
629 | if isinstance(media_file, str) and os.path.isfile(media_file): | ||
630 | mime_type = self.__guess_type(media_file) | ||
631 | media_file = open(media_file, 'rb') | ||
632 | elif isinstance(media_file, str) and os.path.isfile(media_file): | ||
633 | media_file = open(media_file, 'rb') | ||
634 | if mime_type is None: | ||
635 | raise MastodonIllegalArgumentError('Could not determine mime type or data passed directly without mime type.') | ||
636 | if file_name is None: | ||
637 | random_suffix = uuid.uuid4().hex | ||
638 | file_name = "mastodonpyupload_" + str(time.time()) + "_" + str(random_suffix) + mimetypes.guess_extension(mime_type) | ||
639 | return (file_name, media_file, mime_type) | ||
640 | |||
641 | @staticmethod | ||
642 | def __protocolize(base_url): | ||
643 | """Internal add-protocol-to-url helper""" | ||
644 | if not base_url.startswith("http://") and not base_url.startswith("https://"): | ||
645 | base_url = "https://" + base_url | ||
646 | |||
647 | # Some API endpoints can't handle extra /'s in path requests | ||
648 | base_url = base_url.rstrip("/") | ||
649 | return base_url | ||
650 | |||
651 | @staticmethod | ||
652 | def __deprotocolize(base_url): | ||
653 | """Internal helper to strip http and https from a URL""" | ||
654 | if base_url.startswith("http://"): | ||
655 | base_url = base_url[7:] | ||
656 | elif base_url.startswith("https://") or base_url.startswith("onion://"): | ||
657 | base_url = base_url[8:] | ||
658 | return base_url | ||
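Among the helpers above, __unpack_id also converts datetimes to Mastodon's snowflake-style IDs: the Unix timestamp in milliseconds shifted left by 16 bits, with the low 16 bits of sequence data left at zero. A standalone sketch of that conversion, equivalent to the (int(id.timestamp()) << 16) * 1000 expression used there because the shift and the multiplication commute:

    import datetime

    def datetime_to_snowflake(dt):
        # Assume UTC when no timezone is attached, mirroring __datetime_to_epoch.
        if dt.tzinfo is None:
            dt = dt.replace(tzinfo=datetime.timezone.utc)
        millis = int(dt.timestamp()) * 1000
        return millis << 16

    # Usable wherever a min_id / max_id snowflake is accepted.
    print(datetime_to_snowflake(datetime.datetime(2023, 1, 1, tzinfo=datetime.timezone.utc)))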
diff --git a/mastodon/utility.py b/mastodon/utility.py new file mode 100644 index 0000000..f393aa8 --- /dev/null +++ b/mastodon/utility.py | |||
@@ -0,0 +1,77 @@ | |||
1 | # utility.py - utility functions, externally usable | ||
2 | |||
3 | import re | ||
4 | from decorator import decorate | ||
5 | from .error import MastodonVersionError | ||
6 | |||
7 | ### | ||
8 | # Version check functions, including decorator and parser | ||
9 | ### | ||
10 | def parse_version_string(version_string): | ||
11 | """Parses a semver version string, stripping off "rc" stuff if present.""" | ||
12 | string_parts = version_string.split(".") | ||
13 | version_parts = ( | ||
14 | int(re.match("([0-9]*)", string_parts[0]).group(0)), | ||
15 | int(re.match("([0-9]*)", string_parts[1]).group(0)), | ||
16 | int(re.match("([0-9]*)", string_parts[2]).group(0)) | ||
17 | ) | ||
18 | return version_parts | ||
19 | |||
20 | def max_version(*version_strings): | ||
21 | """Returns the maximum version of all provided version strings.""" | ||
22 | return max(version_strings, key=parse_version_string) | ||
23 | |||
24 | def api_version(created_ver, last_changed_ver, return_value_ver): | ||
25 | """Version check decorator. Currently only checks Bigger Than.""" | ||
26 | def api_min_version_decorator(function): | ||
27 | def wrapper(function, self, *args, **kwargs): | ||
28 | if not self.version_check_mode == "none": | ||
29 | if self.version_check_mode == "created": | ||
30 | version = created_ver | ||
31 | else: | ||
32 | version = max_version(last_changed_ver, return_value_ver) | ||
33 | major, minor, patch = parse_version_string(version) | ||
34 | if major > self.mastodon_major: | ||
35 | raise MastodonVersionError("Version check failed (Need version " + version + ")") | ||
36 | elif major == self.mastodon_major and minor > self.mastodon_minor: | ||
37 | raise MastodonVersionError("Version check failed (Need version " + version + ")") | ||
38 | elif major == self.mastodon_major and minor == self.mastodon_minor and patch > self.mastodon_patch: | ||
39 | raise MastodonVersionError("Version check failed (Need version " + version + ", patch is " + str(self.mastodon_patch) + ")") | ||
40 | return function(self, *args, **kwargs) | ||
41 | function.__doc__ = function.__doc__ + "\n\n *Added: Mastodon v" + \ | ||
42 | created_ver + ", last changed: Mastodon v" + last_changed_ver + "*" | ||
43 | return decorate(function, wrapper) | ||
44 | return api_min_version_decorator | ||
45 | |||
46 | ### | ||
47 | # Dict helper class. | ||
48 | # Defined at top level so it can be pickled. | ||
49 | ### | ||
50 | class AttribAccessDict(dict): | ||
51 | def __getattr__(self, attr): | ||
52 | if attr in self: | ||
53 | return self[attr] | ||
54 | else: | ||
55 | raise AttributeError("Attribute not found: " + str(attr)) | ||
56 | |||
57 | def __setattr__(self, attr, val): | ||
58 | if attr in self: | ||
59 | raise AttributeError("Attribute-style access is read only") | ||
60 | super(AttribAccessDict, self).__setattr__(attr, val) | ||
61 | |||
62 | |||
63 | ### | ||
64 | # List helper class. | ||
65 | # Defined at top level so it can be pickled. | ||
66 | ### | ||
67 | class AttribAccessList(list): | ||
68 | def __getattr__(self, attr): | ||
69 | if attr in self: | ||
70 | return self[attr] | ||
71 | else: | ||
72 | raise AttributeError("Attribute not found: " + str(attr)) | ||
73 | |||
74 | def __setattr__(self, attr, val): | ||
75 | if attr in self: | ||
76 | raise AttributeError("Attribute-style access is read only") | ||
77 | super(AttribAccessList, self).__setattr__(attr, val) \ No newline at end of file | ||
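AttribAccessDict and AttribAccessList are plain dict and list subclasses that additionally allow read-only attribute access, which is why parsed API entities can be read both as result["id"] and result.id. A small illustrative example:

    from mastodon.utility import AttribAccessDict

    status = AttribAccessDict({"id": 12345, "visibility": "public"})
    print(status.id)             # 12345, same as status["id"]
    print(status["visibility"])  # still behaves like a normal dict
    try:
        status.id = 1            # existing keys cannot be overwritten via attributes
    except AttributeError as e:
        print(e)                 # "Attribute-style access is read only"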