Compare commits

...

56 Commits

Author SHA1 Message Date
2b6d0dd808 docs: Adjust pyproject description 2023-12-19 09:24:02 +01:00
c2c2c23337 feat: Add package version 2023-12-19 09:21:06 +01:00
Georg Krause
a91eea5922 test: Add test infrastructure and basic model testing 2023-07-26 14:35:57 +02:00
282031245c Rename from mastodon->fediverse 2023-07-26 12:43:34 +02:00
Georg Krause
d47a63e331 ci: Don't push built docker image on PRs 2023-07-13 09:52:26 +02:00
Georg Krause
3cdf24a5f3 fix(exporter): Default to toml as default format 2023-07-13 09:51:54 +02:00
735bb5fe6d Add merge functionality 2023-07-13 09:50:42 +02:00
2180f28c78 Add support for diff and deploy for GTS 2023-05-04 10:47:06 +02:00
27168a2a6e Allow missing values of remote block for GTS support 2023-05-04 10:38:25 +02:00
569cff0957 Add json 2023-05-03 15:40:27 +02:00
e1d9fe04f9 Merge branch 'rework_export' 2023-05-03 15:14:23 +02:00
2984729841 Upadte reference in readme 2023-05-03 15:11:56 +02:00
58429a39f0 Add csv format option 2023-05-03 15:09:12 +02:00
a646714f76 Remove double output, typo 2023-05-03 14:48:09 +02:00
173aac081d Update help 2023-05-03 14:48:09 +02:00
d41bd5322d Rework exporter 2023-05-03 14:48:08 +02:00
fe12631ee4 Remove double output, typo 2023-05-03 14:45:53 +02:00
7d986a7072 Update help 2023-05-03 11:53:41 +02:00
4668d9023e Typos 2023-05-03 11:48:20 +02:00
46b0fe6b50 Rework exporter 2023-05-03 11:30:28 +02:00
3dce62417e refactor: Cleaner solution for exportable dict 2023-01-26 12:05:12 +01:00
Georg Krause
d4c754c103 Merge branch 'develop' 2023-01-26 11:54:22 +01:00
8a8a725002 fix: Remove id from export
As this wil not be the same id on other instances exporting it does not make sense
2023-01-25 23:36:25 +01:00
58998e1c17 fix: Allow name to be empty in local blocklist 2023-01-25 23:35:04 +01:00
a484a41b45 Merge branch 'develop' of https://git.gieszer.link/gcrkrause/mastodon-blocklist-deploy into develop 2023-01-22 18:23:54 +01:00
Georg Krause
7ad318bc48 feat: Build docker image in CI 2023-01-18 12:02:56 +01:00
Georg Krause
8d5676d0b2 refactor: Avoid manually templating toml file entries 2023-01-13 13:56:56 +01:00
6d2a4d82b4 fix: Avoid exception when input is missing for diff and deploy 2023-01-13 13:56:55 +01:00
Georg Krause
d9d3f02fda feat: Add Dockerfile for development and deployment 2023-01-13 13:56:55 +01:00
0fca58810a feat: Allow token via environment variables 2023-01-13 13:56:55 +01:00
181ac45bbf refactor: Rename load_remote_blocklist->load_blocklist_from_instance 2023-01-13 13:56:43 +01:00
6a2a13bd74 refactor: Add missing return types 2023-01-13 13:56:43 +01:00
0dd6930c0f refactor: Rename load_local_blocklist -> load_blocklist_file 2023-01-13 13:56:43 +01:00
33fee03059 Merge branch 'develop' of https://git.gieszer.link/gcrkrause/mastodon-blocklist-deploy into develop 2023-01-13 08:12:35 +01:00
2066c0332d fix: Avoid exception when input is missing for diff and deploy 2023-01-13 08:12:29 +01:00
moanos
5376af3e7e Merge pull request 'refactor: Avoid manually templating toml file entries' (#6) from toml-dump into develop
Reviewed-on: https://git.gieszer.link/gcrkrause/mastodon-blocklist-deploy/pulls/6
2023-01-13 08:08:26 +01:00
moanos
1565f17778 Merge pull request 'feat: Add Dockerfile for development and deployment' (#5) from dockerfile into develop
Reviewed-on: https://git.gieszer.link/gcrkrause/mastodon-blocklist-deploy/pulls/5
2023-01-12 22:27:21 +01:00
Georg Krause
229608a090 refactor: Avoid manually templating toml file entries 2023-01-12 17:11:10 +01:00
Georg Krause
ddc2ba1b43 feat: Add Dockerfile for development and deployment 2023-01-12 16:31:58 +01:00
0b49740e83 feat: Allow token via environment variables 2023-01-12 16:22:03 +01:00
c7872201ea Merge branch 'develop' of https://git.gieszer.link/gcrkrause/mastodon-blocklist-deploy into develop 2023-01-12 16:16:41 +01:00
ce5c1ae39d refactor: Rename load_remote_blocklist->load_blocklist_from_instance 2023-01-12 16:16:18 +01:00
da984d80e4 refactor: Rename load_remote_blocklist->load_blocklist_from_instance 2023-01-12 16:06:01 +01:00
eaccce8c6e refactor: Add missing return types 2023-01-12 16:04:56 +01:00
066e77d493 refactor: Rename load_local_blocklist -> load_blocklist_file 2023-01-12 16:01:55 +01:00
moanos
b4ef4b9199 Merge pull request 'fix: Avoid exception when output is missing for exports' (#4) from fix-exception-with-missing-output into develop
Reviewed-on: https://git.gieszer.link/gcrkrause/mastodon-blocklist-deploy/pulls/4
2023-01-12 15:56:07 +01:00
Georg Krause
0ecc925373 fix: Avoid exception when output is missing for exports 2023-01-12 12:36:46 +01:00
c54beb76d3 fix #3
A conversion to a lowercase string is needed for boolean values
2023-01-12 09:03:37 +01:00
80d66b1919 Add some documentation 2023-01-12 08:40:25 +01:00
c1e4770b0e Restructure project to make a valid poetry project, add script 2023-01-12 08:00:36 +01:00
45f52b940e Add documentation 2023-01-12 07:46:11 +01:00
7c54a1286a Add newlines for better overview 2023-01-12 07:42:23 +01:00
0a20bb3e8d Fix logging strings 2023-01-11 21:27:20 +01:00
288527a76a Formatting 2023-01-11 20:40:05 +01:00
ba9c29a3ab Fix bug with no-delete option 2023-01-11 20:39:46 +01:00
4ddac75d9a Add nodelete option 2023-01-11 20:21:01 +01:00
13 changed files with 952 additions and 166 deletions

40
.drone.yml Normal file

@ -0,0 +1,40 @@
---
kind: pipeline
type: docker
name: test
steps:
- name: test
image: python:3.11
commands:
- pip install poetry
- poetry run pytest --cov-report term-missing --cov=fediverse_blocklist_deploy tests
---
kind: pipeline
type: exec
name: build
platform:
os: linux
arch: arm64
steps:
- name: build
commands:
- docker build -t gcrkrause/fediverse-blocklist-deploy .
- name: push
environment:
USERNAME:
from_secret: docker-hub-user
PASSWORD:
from_secret: docker-hub-pw
commands:
- docker login -u $USERNAME -p $PASSWORD
- docker push gcrkrause/fediverse-blocklist-deploy
- docker image prune -a -f
when:
event:
exclude:
- pull_request
branch:
- main

17
DEVELOPMENT.md Normal file

@ -0,0 +1,17 @@
# Development Guide
## Docker
In order to have a common development environment, it's nice to use Docker, and it's quite easy. To build a new image, simply run
`docker build . -t fediverse_blocklist_deploy`
Now you can execute any commands using
`docker run --rm fediverse_blocklist_deploy --help`
If you want to avoid building new containers for each change, simply mount your code into the container using
`docker run --rm -v $(pwd):/app fediverse_blocklist_deploy`
Please be aware that changes to the package itself require a rebuild anyway.

12
Dockerfile Normal file

@ -0,0 +1,12 @@
FROM python:3.11-slim
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
COPY pyproject.toml poetry.lock README.md /app/
COPY fediverse_blocklist_deploy /app/fediverse_blocklist_deploy
WORKDIR /app
ENTRYPOINT ["fediverse_blocklist_deploy"]
RUN pip install -e .

README.md

@ -1,19 +1,99 @@
# fediverse-blocklist-deploy
A small tool to deploy blocklist updates to a fediverse server using its API.
## Concept
The idea is to maintain a blocklist in a simple structured file in this repository. All changes need to be deployed to
the fediverse server; this is supposed to be automated with Drone CI.
In order to compare the list entries, we can read the whole blocklist
using [the get endpoint](https://docs.joinmastodon.org/methods/admin/domain_blocks/#get). At the same time we read the
whole file in the repository, make a comparison
and [remove](https://docs.joinmastodon.org/methods/admin/domain_blocks/#delete) unblocked domains from the blocklist
and [add](https://docs.joinmastodon.org/methods/admin/domain_blocks/#create) newly added ones.
Since we have several attributes for a domain block, a simple `.txt` file might not be sufficient. We probably want to
set the severity, reject_media, reject_reports and comments. This means we need a human-readable, easily python-readable
and structured file format. Since Python 3.11 got native support for [toml](https://toml.io/) and it
supports [Array of Tables](https://toml.io/en/v1.0.0#array-of-tables), I'd prefer to use this.
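At its core, the comparison is two set differences over the domain lists. The sketch below illustrates that idea against the Mastodon admin endpoint linked above and the toml layout described here; the helper names are made up for the example, and the tool's actual implementation is in the files further down in this diff.
```python
# Illustrative sketch only (not part of the package); see fediverse_blocklist_deploy for the real logic.
import requests
import toml


def fetch_remote_domains(server: str, token: str) -> set:
    """Read the domains currently blocked on the server via the admin API."""
    response = requests.get(
        f"https://{server}/api/v1/admin/domain_blocks",
        headers={"Authorization": f"Bearer {token}"},
    )
    response.raise_for_status()
    return {entry["domain"] for entry in response.json()}


def load_local_domains(path: str = "blocklist.toml") -> set:
    """Read the domains from the toml blocklist kept in the repository."""
    with open(path) as f:
        return {entry["domain"] for entry in toml.load(f)["instances"]}


# Domains that are only in the local file need to be created on the server,
# domains that are only on the server need to be removed:
# to_create = load_local_domains() - fetch_remote_domains(server, token)
# to_remove = fetch_remote_domains(server, token) - load_local_domains()
```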
# Supported server types
- [x] Mastodon
- [X] GoToSocial
# Basic usage
```
usage: fediverse_blocklist_deploy [-h] [-s SERVER] [-t TOKEN] [-i INPUT_FILE] [-r REMOTE_BLOCKLIST] [-o OUTPUT] [-v] [-n]
[--format FORMAT] [--private]
{diff,deploy,export}
Deploy blocklist updates to a fediverse server
positional arguments:
{diff,deploy,export} Either use 'diff' to check the difference between local blocklist and the blocklist on the server, 'deploy'
to apply the current local blocklist or 'export' to export the remote blocklist into a local file.
options:
-h, --help show this help message and exit
-s SERVER, --server SERVER
The address of the server where you want to deploy (e.g. mastodon.social)
-t TOKEN, --token TOKEN
Authorization token
-i INPUT_FILE, --input-file INPUT_FILE
The blocklist to use
-r REMOTE_BLOCKLIST, --remote-blocklist REMOTE_BLOCKLIST
The remote blocklist as json for debugging reasons
-o OUTPUT, --output OUTPUT
Filename where to export the blocklist
-v, --verbose
-n, --no-delete Do not delete existing blocks
--format FORMAT Export format: toml|markdown|csv|json
--private When the flag is set, private comment will also be exported.
```
## Obtain a server token
1. Be an admin on the server.
2. Add an application in the Mastodon Web Client (https://yourdomain.org/settings/applications/new). Make sure to select the permissions `admin:read` and `admin:write`.
3. Copy the Token (last value in the table); if you want, verify it with the snippet below. ![](assets/obtain_token.png)
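To check that the token actually has the required admin permissions before running the tool, you can query the same endpoint the tool reads from. This is only an illustrative sketch; `yourdomain.org` and `yourtoken` are placeholders:
```python
import requests

# Placeholders: use your own instance and the token copied above.
server = "yourdomain.org"
token = "yourtoken"

response = requests.get(
    f"https://{server}/api/v1/admin/domain_blocks",
    headers={"Authorization": f"Bearer {token}"},
)
# 200 means the token can read the domain blocks; a 403 usually points to missing admin permissions.
print(response.status_code)
```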
# Typical workflow
1. **Export the current blocklist from the server**
```
fediverse_blocklist_deploy export -s yourserver -t yourtoken -o blocklist.toml
```
2. **Manually add something to the blocklist**
```toml
[[instances]]
name = "instance-to-block.com"
domain = "instance-to-block.com"
severity = "suspend"
reject_media = true
reject_reports = true
public_comment = "X, Y and Z"
private_comment = "We discussed this after X and Y and now that Z happend we decided to block"
```
3. **Check the difference between the local and remote blocklist**
```
fediverse_blocklist_deploy diff -s yourserver -t yourtoken -i blocklist.toml
```
4. **Apply the local blocklist to the server**
```
fediverse_blocklist_deploy deploy -s yourserver -t yourtoken -i blocklist.toml
```

BIN
assets/obtain_token.png Normal file

Binary file not shown.


104
cli.py

@ -1,104 +0,0 @@
# import tomllib
import argparse
import json
import logging
import requests
import toml
from models import Instance
def load_local_blocklist(filename: str) -> [Instance]:
with open(filename, "r") as f:
data = toml.load(f)
instances = []
for instance_dict in data["instances"]:
instance = Instance(instance_dict)
instances.append(instance)
return instances
def export_blocklist_toml(blocklist: [Instance], filname: str):
toml_str = ""
for instance in blocklist:
toml_str += f'''
[[instances]]
name = "{instance.domain}"
domain = "{instance.domain}"
severity = "{instance.severity}"
reject_media = {str(instance.reject_media).lower()}
reject_reports = {str(instance.reject_reports).lower()}
public_comment = "{instance.public_comment}"
private_comment = "{instance.private_comment}"
'''
with open(filname, "w") as f:
f.write(toml_str)
def blocklist_json_to_instances(blocklist_json: str):
instances = []
for i in blocklist_json:
instances.append(Instance(i))
return instances
def load_remote_blocklist(server: str, token: str):
headers = {
f'Authorization': f'Bearer {token}',
}
response = requests.get(f'https://{server}/api/v1/admin/domain_blocks', headers=headers)
if response.status_code == 200:
blocklist_json = json.loads(response.content)
return blocklist_json_to_instances(blocklist_json)
else:
raise ConnectionError(f"Could not connect to the server ({response.status_code}: {response.reason})")
def cli():
parser = argparse.ArgumentParser(description='Deploy blocklist updates to a mastodon server')
parser.add_argument('action', choices=['diff', 'deploy', 'export'],
help="Either use 'diff' to check the difference between current blocks and future blocks, "
"'deploy' to apply the current local blocklist or 'export' to export the remote "
"blocklist into a local file.")
parser.add_argument('-s', '--server', help="The address of the server where you want to deploy (e.g. "
"mastodon.social)")
parser.add_argument('-t', '--token', help="Authorization token")
parser.add_argument('-i', '--input-file', help="The blocklist to use")
parser.add_argument('-r', '--remote-blocklist', help="The remote blocklist as json for debugging reasons")
parser.add_argument('-o', '--output', help="Filename where to export the blocklist")
parser.add_argument('-v', '--verbose',
action='store_true')
args = parser.parse_args()
if args.verbose:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.WARN)
"""if there is a remote blocklist provided load this instead of fetching it from a server (for debugging reasons)"""
if args.remote_blocklist:
with open(args.remote_blocklist) as f:
remote_blocklist = blocklist_json_to_instances(json.load(f))
else:
remote_blocklist = load_remote_blocklist(server=args.server, token=args.token)
"""Load local blocklist only when needed"""
if args.action in ["diff", "deploy"]:
if args.input_file:
blocklist_filename = args.input_file
else:
blocklist_filename = "blocklist.toml"
local_blocklist = load_local_blocklist(blocklist_filename)
if args.action == "diff":
Instance.show_diffs(local_blocklist, remote_blocklist)
elif args.action == "deploy":
diffs = Instance.list_diffs(local_blocklist, remote_blocklist)
Instance.apply_blocks_from_diff(diffs, args.server, args.token)
elif args.action == "export":
export_blocklist_toml(remote_blocklist, args.output)
if __name__ == "__main__":
cli()

View File

@ -0,0 +1 @@
version = "0.1.0"

fediverse_blocklist_deploy/cli.py

@ -0,0 +1,151 @@
# import tomllib
import argparse
import json
import logging
import requests
import os
import toml
from fediverse_blocklist_deploy.models import Instance
from fediverse_blocklist_deploy.helpers import blocklist_to_markdown, blocklist_to_toml, blocklist_to_csv, \
blocklist_to_json
def load_blocklist_file(filename: str) -> [Instance]:
with open(filename, "r") as f:
data = toml.load(f)
instances = []
for instance_dict in data["instances"]:
instance = Instance(instance_dict)
instances.append(instance)
return instances
def blocklist_json_to_instances(blocklist_json: str) -> [Instance]:
instances = []
for i in blocklist_json:
instances.append(Instance(i))
return instances
def load_blocklist_from_instance(server: str, token: str) -> [Instance]:
headers = {
f'Authorization': f'Bearer {token}',
}
response = requests.get(f'https://{server}/api/v1/admin/domain_blocks', headers=headers)
if response.status_code == 200:
blocklist_json = json.loads(response.content)
return blocklist_json_to_instances(blocklist_json)
else:
raise ConnectionError(f"Could not connect to the server ({response.status_code}: {response.reason})")
def remove_key_from_dict(dict, key):
del dict[key]
return dict
def exporter(blocklist, output=None, format: str = "toml", private: bool = False):
if format == "toml":
exported_text = blocklist_to_toml(blocklist, private)
if format == "csv":
exported_text = blocklist_to_csv(blocklist, private)
if format == "markdown":
exported_text = blocklist_to_markdown(blocklist, private)
if format == "json":
exported_text = blocklist_to_json(blocklist, private)
# Output the text
if output is not None:
with open(output, "w") as f:
f.write(exported_text)
else:
print(exported_text)
def merge(input_file, merge_target, format: str = "toml", private: bool = False, overwrite=False):
input_blocklist = load_blocklist_file(input_file)
merge_target_blocklist = load_blocklist_file(merge_target)
for input_instance in input_blocklist:
# If the block is already there with the same parameters we do nothing
if input_instance in merge_target_blocklist:
continue
# Check if there is a domain in the merge target where the input domain is similar
        try:
            merge_target_instance = [merge_target_instance for merge_target_instance in merge_target_blocklist if input_instance.domain == merge_target_instance.domain][0]
            if overwrite:
                key_input = "i"
            else:
                key_input = ""
                while key_input not in ("i", "o"):
                    print(f"Different settings for {input_instance.domain} detected.")
                    print(f"In the input blocklist the setting is\n{input_instance} whereas it's {merge_target_instance} in the merge target")
                    key_input = input("Keep input (i) or original (o) [i/o] ").strip().lower()
            if key_input == "i":
                merge_target_blocklist.remove(merge_target_instance)
                merge_target_blocklist.append(input_instance)
        except IndexError:
            # The domain is not in the merge target yet, so it can simply be taken over
            merge_target_blocklist.append(input_instance)
    # Write the merged result back to the merge target
    exporter(merge_target_blocklist, merge_target, format, private)
def cli():
parser = argparse.ArgumentParser(description='Deploy blocklist updates to a fediverse server')
parser.add_argument('action', choices=['diff', 'deploy', 'export', 'merge'],
help="Either use 'diff' to check the difference between local blockĺist and the blocklist on "
"the server, 'deploy' to apply the current local blocklist or 'export' to export the remote "
"blocklist into a local file. merge can be used to merge a blocklist (given by -i) into "
"another (-o)")
parser.add_argument('-s', '--server', help="The address of the server where you want to deploy (e.g. "
"mastodon.social)")
parser.add_argument('-t', '--token', help="Authorization token")
parser.add_argument('-i', '--input-file', help="The blocklist to use")
parser.add_argument('-r', '--remote-blocklist', help="The remote blocklist as json for debugging reasons")
parser.add_argument('-o', '--output', help="Filename where to export the blocklist")
parser.add_argument('-v', '--verbose', action='store_true')
parser.add_argument('-n', '--no-delete', action='store_true', help="Do not delete existing blocks")
parser.add_argument('--format', default="toml", type=str, help="Export format: toml|markdown|csv|json")
parser.add_argument('--private', action='store_true', help="When the flag is set, private comment will also be "
"exported.")
args = parser.parse_args()
if args.verbose:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.WARN)
if args.token:
token = args.token
else:
token = os.getenv('MBD_TOKEN')
"""if there is a remote blocklist provided load this instead of fetching it from a server (for debugging reasons)"""
if args.remote_blocklist:
with open(args.remote_blocklist) as f:
remote_blocklist = blocklist_json_to_instances(json.load(f))
else:
remote_blocklist = load_blocklist_from_instance(server=args.server, token=token)
"""Load local blocklist only when needed"""
if args.action in ["diff", "deploy", "merge"]:
if args.input_file:
blocklist_filename = args.input_file
else:
blocklist_filename = "../blocklist.toml"
try:
local_blocklist = load_blocklist_file(blocklist_filename)
except FileNotFoundError:
print("Local blocklist file was not found. Make sure to specify it's location via -i")
exit()
if args.action == "diff":
Instance.show_diffs(local_blocklist, remote_blocklist)
elif args.action == "deploy":
diffs = Instance.list_diffs(local_blocklist, remote_blocklist)
Instance.apply_blocks_from_diff(diffs, args.server, token, args.no_delete)
elif args.action == "export":
exporter(remote_blocklist, args.output, args.format, args.private)
elif args.action == "merge":
merge(args.input_file, args.output, args.format, args.private)
if __name__ == "__main__":
cli()

fediverse_blocklist_deploy/helpers.py

@ -0,0 +1,36 @@
from fediverse_blocklist_deploy.models import Instance
import toml
import io
import csv
import json
def blocklist_to_markdown(blocklist: [Instance], private: bool = False):
if private:
markdown_string = "| Instance | Status | Reason | Private Comment |\n | --- | --- | --- |\n"
else:
markdown_string = "| Instance | Status | Reason |\n | --- | --- | --- |\n"
for instance in blocklist:
if private:
markdown_string += f"| {instance.domain} | {instance.severity} | {instance.public_comment} | {instance.private_comment} |\n"
else:
markdown_string += f"| {instance.domain} | {instance.severity} | {instance.public_comment} |\n"
return markdown_string
def blocklist_to_toml(blocklist: [Instance], private: bool = False):
toml_string = toml.dumps({"instances": [b.as_dict(private) for b in blocklist]})
return toml_string
def blocklist_to_csv(blocklist: [Instance], private: bool = False):
csv_string = io.StringIO()
blocklist_as_dict = [b.as_dict(private) for b in blocklist]
keys = blocklist_as_dict[0].keys()
w = csv.DictWriter(csv_string, keys)
w.writeheader()
w.writerows(blocklist_as_dict)
return csv_string.getvalue()
def blocklist_to_json(blocklist: [Instance], private: bool = False):
json_string = json.dumps([b.as_dict(private) for b in blocklist])
return json_string

fediverse_blocklist_deploy/models.py

@ -2,21 +2,22 @@ import logging
 import requests
+from typing import Dict, Union
 class Instance:
-    def __init__(self, instance_dict):
+    def __init__(self, instance_dict : Dict):
         """If obfuscate, reject_media or reject_reports are not specified default to False"""
-        self.obfuscate = False
-        self.reject_media = False
-        self.reject_reports = False
-        self.id = None
-        """Remote blocks and local blocks are parsed differently"""
-        try:
-            instance_dict["id"]
-            self.parse_remote_block(instance_dict)
-        except KeyError:
-            self.parse_local_block(instance_dict)
+        self.severity: str = "suspend"
+        self.obfuscate: bool = False
+        self.reject_media: bool = False
+        self.reject_reports: bool = False
+        self.id: Union[int, None] = None
+        self.domain: str = ""
+        self.private_comment: str = ""
+        self.public_comment: str = ""
+        self.parse_block(instance_dict)
     def __str__(self):
         return f"{self.domain}: {self.severity}"
@ -25,56 +26,56 @@ class Instance:
         return self.domain == other.domain and self.severity == other.severity and self.reject_media == other.reject_media and self.reject_reports == other.reject_reports and self.obfuscate == other.obfuscate
     def status_str(self):
-        return f"{self.severity}, Reject reports: {self.reject_reports}, Reject media: {self.reject_media}, Obfuscate: {self.obfuscate}"
+        return f"{self.severity}\nReject reports: {self.reject_reports}\nReject media: {self.reject_media}\nObfuscate: {self.obfuscate}"
-    def parse_remote_block(self, instance_dict):
-        self.domain = instance_dict["domain"]
-        self.id = instance_dict["id"]
-        self.severity = instance_dict["severity"]
-        self.public_comment = instance_dict["public_comment"]
-        self.private_comment = instance_dict["private_comment"]
-        self.obfuscate = instance_dict["obfuscate"]
-        self.reject_media = instance_dict["reject_media"]
-        self.reject_reports = instance_dict["reject_reports"]
-    def parse_local_block(self, instance_dict):
-        self.name = instance_dict["name"]
-        self.domain = instance_dict["domain"]
-        self.severity = instance_dict["severity"]
-        self.public_comment = instance_dict["public_comment"]
-        self.private_comment = instance_dict["private_comment"]
-        try:
-            self.obfuscate = instance_dict["obfuscate"]
-        except KeyError:
-            pass
-        try:
-            self.reject_media = instance_dict["reject_media"]
-        except KeyError:
-            pass
-        try:
-            self.reject_reports = instance_dict["reject_reports"]
-        except KeyError:
-            pass
+    def as_dict(self, private=False):
+        keys = ["domain", "severity", "public_comment", "obfuscate", "reject_media", "reject_reports"]
+        if private:
+            keys.append("private_comment")
+        exportable = {}
+        for key in keys:
+            exportable[key] = getattr(self, key)
+        return exportable
+    def parse_block(self, instance_dict):
+        # this specifies possible properties and default values if not found on the remote source. If a default is None
+        # the value is required and the parse will fail
+        properties_and_defaults = [("domain", None), ("severity", "suspend"), ("public_comment", ""),
+                                   ("private_comment", ""), ("obfuscate", False), ("reject_media", False),
+                                   ("reject_reports", False)]
+        for key, default in properties_and_defaults:
+            try:
+                setattr(self, key, instance_dict[key])
+            except KeyError:
+                if default is not None:
+                    setattr(self, key, default)
+                else:
+                    raise KeyError(f"The key {key} was not in the instance_dict response.")
     def apply(self, server, token, block_id=None):
+        """Applies instance block on the remote server"""
         headers = {
             f'Authorization': f'Bearer {token}',
         }
+        # As long as we generate this enside of apply we cannot properly test for the correct format
         data = {"domain": self.domain,
                 "severity": self.severity,
-                "reject_media": self.reject_media,
-                "reject_reports": self.reject_reports,
-                "private_comment": self.private_comment,
+                "reject_media": str(self.reject_media).lower(),
+                "reject_reports": str(self.reject_reports).lower(),
+                "private_comment": str(self.private_comment).lower(),
                 "public_comment": self.public_comment,
-                "obfuscate": self.obfuscate}
+                "obfuscate": str(self.obfuscate).lower()}
         """If no id is given add a new block, else update the existing block"""
         if block_id is None:
             response = requests.post(f'https://{server}/api/v1/admin/domain_blocks', data=data, headers=headers)
         else:
-            response = requests.put(f'https://{server}/api/v1/admin/domain_blocks/{block_id}', data=data, headers=headers)
+            response = requests.put(f'https://{server}/api/v1/admin/domain_blocks/{block_id}', data=data,
+                                    headers=headers)
         if response.status_code != 200:
-            raise ConnectionError(f"Could not apply block ({response.status_code}: {response.reason})")
+            raise ConnectionError(f"Could not apply block for {self.domain} ({response.status_code}: {response.reason})")
     def delete(self, server: str, token: str):
+        """Deletes the instance from the blocklist on the remote server"""
         headers = {
             f'Authorization': f'Bearer {token}',
         }
@ -82,9 +83,9 @@ class Instance:
         if response.status_code != 200:
             raise ConnectionError(f"Could not apply block ({response.status_code}: {response.reason})")
     @staticmethod
     def list_diffs(local_blocklist, remote_blocklist):
+        """Compares the local and remote blocklist and returns a list of differences"""
         diffs = []
         for local_instance in local_blocklist:
             instance_found = False
@ -107,30 +108,33 @@ class Instance:
         return diffs
     @staticmethod
-    def apply_blocks_from_diff(diffs, server, token):
+    def apply_blocks_from_diff(diffs, server, token, no_delete: bool):
+        """Uses a diff (list of difference in local an remote instance) to apply instance blocks"""
         for diff in diffs:
             if diff["local"] is None:
-                """Delete the block on the remote server"""
-                diff['remote'].delete(server, token)
-                logging.info(f"Deleted {diff['remote'].domain} from blocklist")
+                if not no_delete:
+                    """Delete the block on the remote server"""
+                    diff['remote'].delete(server, token)
+                    logging.info(f"Deleted {diff['remote'].domain} from blocklist")
             elif diff["remote"] is None:
                 """Add the block on the remote server"""
                 diff["local"].apply(server, token)
-                logging.info(f"Added {diff['remote'].domain} to blocklist")
+                logging.info(f"Added {diff['local'].domain} to blocklist")
             else:
                 """Update the block on the remote server"""
                 diff["local"].apply(server, token, block_id=diff["remote"].id)
-                logging.info(f"Updated {diff['remote'].domain} in blocklist")
+                logging.info(f"Updated {diff['local'].domain} in blocklist")
     @staticmethod
     def show_diffs(local_blocklist, remote_blocklist):
+        """Shows a table in the CLI comparing the local and remote blocklist"""
         from rich.table import Table
         from rich.console import Console
         table = Table(title="Differences", expand=True, show_lines=True)
         table.add_column("Domain", style="cyan")
+        table.add_column("Current remote status", style="magenta")
         table.add_column("Local status", style="green")
-        table.add_column("Current remote status", style="magenta")
         diffs = Instance.list_diffs(local_blocklist, remote_blocklist)
         for diff in diffs:
             if diff["local"] is None:
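The reworked `parse_block` above is also what the new test pipeline in `.drone.yml` exercises with pytest. A minimal sketch of such a test, assuming the package layout `fediverse_blocklist_deploy/models.py` implied by the imports in this diff (the test functions themselves are hypothetical, not the repository's actual tests):
```python
import pytest

from fediverse_blocklist_deploy.models import Instance


def test_parse_block_applies_defaults():
    # Only "domain" is required; everything else falls back to the documented defaults.
    instance = Instance({"domain": "example.org"})
    assert instance.severity == "suspend"
    assert instance.reject_media is False
    # Private comments are only exported when explicitly requested.
    assert "private_comment" not in instance.as_dict(private=False)


def test_parse_block_requires_domain():
    # A missing "domain" key has no default, so parsing raises.
    with pytest.raises(KeyError):
        Instance({"severity": "suspend"})
```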

446
poetry.lock generated Normal file

@ -0,0 +1,446 @@
# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
[[package]]
name = "certifi"
version = "2023.7.22"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.6"
files = [
{file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"},
{file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"},
]
[[package]]
name = "charset-normalizer"
version = "3.2.0"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
optional = false
python-versions = ">=3.7.0"
files = [
{file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"},
{file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"},
{file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"},
{file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"},
{file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"},
{file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"},
{file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"},
{file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"},
{file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"},
{file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"},
{file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"},
{file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"},
{file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"},
{file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"},
{file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"},
{file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"},
{file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"},
{file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"},
{file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"},
{file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"},
{file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"},
{file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"},
{file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"},
{file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"},
{file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"},
{file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"},
{file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"},
{file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"},
{file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"},
{file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"},
{file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"},
{file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"},
{file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"},
{file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"},
{file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"},
{file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"},
{file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"},
{file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"},
{file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"},
{file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"},
{file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"},
{file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"},
{file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"},
{file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"},
{file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"},
{file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"},
{file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"},
{file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"},
{file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"},
{file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"},
{file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"},
{file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"},
{file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"},
{file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"},
{file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"},
{file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"},
{file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"},
{file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"},
{file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"},
{file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"},
{file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"},
{file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"},
{file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"},
{file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"},
{file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"},
{file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"},
{file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"},
{file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"},
{file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"},
{file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"},
{file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"},
{file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"},
{file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"},
{file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"},
{file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"},
]
[[package]]
name = "colorama"
version = "0.4.6"
description = "Cross-platform colored terminal text."
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
{file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"},
]
[[package]]
name = "coverage"
version = "7.2.7"
description = "Code coverage measurement for Python"
optional = false
python-versions = ">=3.7"
files = [
{file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"},
{file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"},
{file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"},
{file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"},
{file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"},
{file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"},
{file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"},
{file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"},
{file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"},
{file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"},
{file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"},
{file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"},
{file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"},
{file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"},
{file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"},
{file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"},
{file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"},
{file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"},
{file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"},
{file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"},
{file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"},
{file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"},
{file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"},
{file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"},
{file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"},
{file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"},
{file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"},
{file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"},
{file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"},
{file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"},
{file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"},
{file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"},
{file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"},
{file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"},
{file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"},
{file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"},
{file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"},
{file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"},
{file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"},
{file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"},
{file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"},
{file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"},
{file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"},
{file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"},
{file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"},
{file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"},
{file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"},
{file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"},
{file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"},
{file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"},
{file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"},
{file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"},
{file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"},
{file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"},
{file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"},
{file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"},
{file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"},
{file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"},
{file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"},
{file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"},
]
[package.dependencies]
tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""}
[package.extras]
toml = ["tomli"]
[[package]]
name = "exceptiongroup"
version = "1.1.2"
description = "Backport of PEP 654 (exception groups)"
optional = false
python-versions = ">=3.7"
files = [
{file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"},
{file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"},
]
[package.extras]
test = ["pytest (>=6)"]
[[package]]
name = "idna"
version = "3.4"
description = "Internationalized Domain Names in Applications (IDNA)"
optional = false
python-versions = ">=3.5"
files = [
{file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
{file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
]
[[package]]
name = "iniconfig"
version = "2.0.0"
description = "brain-dead simple config-ini parsing"
optional = false
python-versions = ">=3.7"
files = [
{file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"},
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
]
[[package]]
name = "markdown-it-py"
version = "3.0.0"
description = "Python port of markdown-it. Markdown parsing, done right!"
optional = false
python-versions = ">=3.8"
files = [
{file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"},
{file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"},
]
[package.dependencies]
mdurl = ">=0.1,<1.0"
[package.extras]
benchmarking = ["psutil", "pytest", "pytest-benchmark"]
code-style = ["pre-commit (>=3.0,<4.0)"]
compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"]
linkify = ["linkify-it-py (>=1,<3)"]
plugins = ["mdit-py-plugins"]
profiling = ["gprof2dot"]
rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
[[package]]
name = "mdurl"
version = "0.1.2"
description = "Markdown URL utilities"
optional = false
python-versions = ">=3.7"
files = [
{file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
{file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
]
[[package]]
name = "packaging"
version = "23.1"
description = "Core utilities for Python packages"
optional = false
python-versions = ">=3.7"
files = [
{file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"},
{file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"},
]
[[package]]
name = "pluggy"
version = "1.2.0"
description = "plugin and hook calling mechanisms for python"
optional = false
python-versions = ">=3.7"
files = [
{file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"},
{file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"},
]
[package.extras]
dev = ["pre-commit", "tox"]
testing = ["pytest", "pytest-benchmark"]
[[package]]
name = "pygments"
version = "2.15.1"
description = "Pygments is a syntax highlighting package written in Python."
optional = false
python-versions = ">=3.7"
files = [
{file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"},
{file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"},
]
[package.extras]
plugins = ["importlib-metadata"]
[[package]]
name = "pytest"
version = "7.4.0"
description = "pytest: simple powerful testing with Python"
optional = false
python-versions = ">=3.7"
files = [
{file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"},
{file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"},
]
[package.dependencies]
colorama = {version = "*", markers = "sys_platform == \"win32\""}
exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""}
iniconfig = "*"
packaging = "*"
pluggy = ">=0.12,<2.0"
tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""}
[package.extras]
testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"]
[[package]]
name = "pytest-cov"
version = "4.1.0"
description = "Pytest plugin for measuring coverage."
optional = false
python-versions = ">=3.7"
files = [
{file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"},
{file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"},
]
[package.dependencies]
coverage = {version = ">=5.2.1", extras = ["toml"]}
pytest = ">=4.6"
[package.extras]
testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"]
[[package]]
name = "requests"
version = "2.31.0"
description = "Python HTTP for Humans."
optional = false
python-versions = ">=3.7"
files = [
{file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"},
{file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"},
]
[package.dependencies]
certifi = ">=2017.4.17"
charset-normalizer = ">=2,<4"
idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<3"
[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "requests-mock"
version = "1.11.0"
description = "Mock out responses from the requests package"
optional = false
python-versions = "*"
files = [
{file = "requests-mock-1.11.0.tar.gz", hash = "sha256:ef10b572b489a5f28e09b708697208c4a3b2b89ef80a9f01584340ea357ec3c4"},
{file = "requests_mock-1.11.0-py2.py3-none-any.whl", hash = "sha256:f7fae383f228633f6bececebdab236c478ace2284d6292c6e7e2867b9ab74d15"},
]
[package.dependencies]
requests = ">=2.3,<3"
six = "*"
[package.extras]
fixture = ["fixtures"]
test = ["fixtures", "mock", "purl", "pytest", "requests-futures", "sphinx", "testtools"]
[[package]]
name = "rich"
version = "13.4.2"
description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
optional = false
python-versions = ">=3.7.0"
files = [
{file = "rich-13.4.2-py3-none-any.whl", hash = "sha256:8f87bc7ee54675732fa66a05ebfe489e27264caeeff3728c945d25971b6485ec"},
{file = "rich-13.4.2.tar.gz", hash = "sha256:d653d6bccede5844304c605d5aac802c7cf9621efd700b46c7ec2b51ea914898"},
]
[package.dependencies]
markdown-it-py = ">=2.2.0"
pygments = ">=2.13.0,<3.0.0"
[package.extras]
jupyter = ["ipywidgets (>=7.5.1,<9)"]
[[package]]
name = "six"
version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
files = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
[[package]]
name = "toml"
version = "0.10.2"
description = "Python Library for Tom's Obvious, Minimal Language"
optional = false
python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
files = [
{file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
{file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
]
[[package]]
name = "tomli"
version = "2.0.1"
description = "A lil' TOML parser"
optional = false
python-versions = ">=3.7"
files = [
{file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
{file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
]
[[package]]
name = "urllib3"
version = "2.0.4"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=3.7"
files = [
{file = "urllib3-2.0.4-py3-none-any.whl", hash = "sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4"},
{file = "urllib3-2.0.4.tar.gz", hash = "sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11"},
]
[package.extras]
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"]
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
zstd = ["zstandard (>=0.18.0)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "82de493bc3389f8760e7c66724ea386b6ed9e3cac302d7071a93beb60f5e7e63"

pyproject.toml

@@ -1,10 +1,13 @@
 [tool.poetry]
-name = "mastodon-blocklist-deploy"
+name = "fediverse-blocklist-deploy"
 version = "0.1.0"
-description = "A small tool to deploy blocklist updates to a mastodon server using its API."
+description = "A small tool to export, compareof merge and deploy blocklists of a fediverse server"
 authors = ["Georg Krause <mail@georg-krause.net>", "Julian-Samuel Gebühr <julian-samuel@gebuehr.net>"]
 readme = "README.md"
-packages = [{include = "mastodon_blocklist_deploy"}]
+packages = [{include = "fediverse_blocklist_deploy"}]
+license = "MIT"
+keywords = ["fediverse", "blocklist", "mastodon", "gotosocial", "safety"]
+
 
 [tool.poetry.dependencies]
 python = "^3.10"
@@ -12,6 +15,14 @@ requests = "^2.28.1"
 rich = "^13.0.1"
 toml = "^0.10.2"
 
+[tool.poetry.scripts]
+fediverse_blocklist_deploy = 'fediverse_blocklist_deploy.cli:cli'
+
+[tool.poetry.group.test.dependencies]
+pytest = "^7.4.0"
+pytest-cov = "^4.1.0"
+requests-mock = "^1.11.0"
+
 [build-system]
 requires = ["poetry-core"]
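The new [tool.poetry.scripts] entry exposes fediverse_blocklist_deploy.cli:cli as a console command for the renamed package, and the [tool.poetry.group.test.dependencies] group pins pytest, pytest-cov and requests-mock for the test suite introduced below. As a rough illustration only (the helper file name and the coverage flag are assumptions, not part of this change set), the new test group could be driven programmatically like this:

# run_tests.py - hypothetical helper, not part of the repository; it drives the
# tooling installed by the new test group (pytest + pytest-cov) roughly the way
# `poetry run pytest --cov` would.
import sys

import pytest

if __name__ == "__main__":
    # --cov targets the renamed package; tests/ contains the new test_models.py.
    sys.exit(pytest.main(["--cov=fediverse_blocklist_deploy", "tests/"]))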

tests/test_models.py Normal file

@@ -0,0 +1,92 @@
import pytest

from fediverse_blocklist_deploy import models


def test_empty_instance():
    with pytest.raises(KeyError):
        models.Instance({})


def test_minimal_init():
    i: models.Instance = models.Instance({"domain": "abc.xyz"})
    assert i.id == None
    assert i.domain == "abc.xyz"
    assert i.obfuscate == False
    assert i.reject_media == False
    assert i.reject_reports == False


def test_string_representation():
    i: models.Instance = models.Instance({"domain": "abc.xyz"})
    assert str(i) == "abc.xyz: suspend"


def test_status():
    i: models.Instance = models.Instance({"domain": "abc.xyz"})
    assert i.status_str() == "suspend\nReject reports: False\nReject media: False\nObfuscate: False"


def test_equality():
    a1: models.Instance = models.Instance({"domain": "a"})
    a2: models.Instance = models.Instance({"domain": "a"})
    b: models.Instance = models.Instance({"domain": "b"})
    assert a1 == a2
    assert a2 != b


def test_as_dict():
    test_data = {"domain": "abc.xyz", "severity": "suspend", "private_comment": "hidden", "public_comment": "", "obfuscate": True, "reject_media": False, "reject_reports": False}
    i: models.Instance = models.Instance(test_data)
    test_data.pop("private_comment")
    assert i.as_dict() == test_data


def test_as_dict_private():
    test_data = {"domain": "abc.xyz", "severity": "suspend", "private_comment": "hidden", "public_comment": "", "obfuscate": True, "reject_media": False, "reject_reports": False}
    i: models.Instance = models.Instance(test_data)
    assert i.as_dict(private=True) == test_data


def test_apply(requests_mock):
    requests_mock.post("https://server.org/api/v1/admin/domain_blocks", text="success")
    i: models.Instance = models.Instance({"domain": "abc.xyz"})
    i.apply("server.org", token="abcdef")
    assert requests_mock.called


def test_apply_with_id(requests_mock):
    requests_mock.put("https://server.org/api/v1/admin/domain_blocks/123", text="success")
    i: models.Instance = models.Instance({"domain": "abc.xyz"})
    i.apply("server.org", token="abcdef", block_id=123)
    assert requests_mock.called


def test_apply_error(requests_mock):
    requests_mock.post("https://server.org/api/v1/admin/domain_blocks", status_code=400)
    with pytest.raises(ConnectionError):
        i: models.Instance = models.Instance({"domain": "abc.xyz"})
        i.apply("server.org", token="abcdef")
    assert requests_mock.called


def test_delete(requests_mock):
    requests_mock.delete("https://server.org/api/v1/admin/domain_blocks/123", text="success")
    i: models.Instance = models.Instance({"domain": "abc.xyz"})
    i.id = 123
    i.delete("server.org", token="abcdef")
    assert requests_mock.called


def test_delete_error(requests_mock):
    requests_mock.delete("https://server.org/api/v1/admin/domain_blocks/123", status_code=400)
    i: models.Instance = models.Instance({"domain": "abc.xyz"})
    i.id = 123
    with pytest.raises(ConnectionError):
        i.delete("server.org", token="abcdef")
    assert requests_mock.called


def test_diff_equal():
    a1: models.Instance = models.Instance({"domain": "a"})
    a2: models.Instance = models.Instance({"domain": "a"})
    assert models.Instance.list_diffs([a1], [a2]) == []


def test_diff_not_equal():
    a1: models.Instance = models.Instance({"domain": "a2"})
    a2: models.Instance = models.Instance({"domain": "a1"})
    assert models.Instance.list_diffs([a1], [a2]) == [{"local": a1, "remote": None}, {"local": None, "remote": a2}]
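
For orientation, the following is a minimal sketch of an Instance model that would satisfy the tests above. It is reconstructed from the assertions and mocked endpoints only; the actual fediverse_blocklist_deploy/models.py is not part of this diff and may be implemented differently.

# Illustrative sketch, not the repository's models.py.
import requests


class Instance:
    def __init__(self, data: dict):
        self.id = data.get("id")
        self.domain = data["domain"]  # mandatory -> KeyError for an empty dict
        self.severity = data.get("severity", "suspend")
        self.private_comment = data.get("private_comment", "")
        self.public_comment = data.get("public_comment", "")
        self.obfuscate = data.get("obfuscate", False)
        self.reject_media = data.get("reject_media", False)
        self.reject_reports = data.get("reject_reports", False)

    def __str__(self) -> str:
        return f"{self.domain}: {self.severity}"

    def __eq__(self, other) -> bool:
        return isinstance(other, Instance) and self.as_dict() == other.as_dict()

    def status_str(self) -> str:
        return (
            f"{self.severity}\nReject reports: {self.reject_reports}"
            f"\nReject media: {self.reject_media}\nObfuscate: {self.obfuscate}"
        )

    def as_dict(self, private: bool = False) -> dict:
        # The id and the private comment are excluded from the public export.
        d = {
            "domain": self.domain,
            "severity": self.severity,
            "public_comment": self.public_comment,
            "obfuscate": self.obfuscate,
            "reject_media": self.reject_media,
            "reject_reports": self.reject_reports,
        }
        if private:
            d["private_comment"] = self.private_comment
        return d

    def apply(self, host: str, token: str, block_id=None) -> None:
        # POST creates a new domain block; PUT updates an existing one by id.
        headers = {"Authorization": f"Bearer {token}"}
        if block_id is None:
            r = requests.post(
                f"https://{host}/api/v1/admin/domain_blocks",
                json=self.as_dict(private=True),
                headers=headers,
            )
        else:
            r = requests.put(
                f"https://{host}/api/v1/admin/domain_blocks/{block_id}",
                json=self.as_dict(private=True),
                headers=headers,
            )
        if r.status_code >= 400:
            raise ConnectionError(f"Could not apply block for {self.domain}: {r.status_code}")

    def delete(self, host: str, token: str) -> None:
        r = requests.delete(
            f"https://{host}/api/v1/admin/domain_blocks/{self.id}",
            headers={"Authorization": f"Bearer {token}"},
        )
        if r.status_code >= 400:
            raise ConnectionError(f"Could not delete block for {self.domain}: {r.status_code}")

    @staticmethod
    def list_diffs(local: list, remote: list) -> list:
        # Entries present only locally come first, then entries present only remotely.
        diffs = [{"local": i, "remote": None} for i in local if i not in remote]
        diffs += [{"local": None, "remote": i} for i in remote if i not in local]
        return diffs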