Merge pull request #760 from OMEGARAZER/pep585
Serene-Arc authored Jan 28, 2023
2 parents e96b167 + 63b0607 commit a01b18a
Showing 14 changed files with 33 additions and 30 deletions.
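
The branch name (pep585) describes the common thread in the diffs below: container and callable generics move from typing to collections.abc, typing.Type[X] becomes the built-in type[X] per PEP 585, bare open() calls are routed through pathlib's Path.open(), and PyYAML's load/dump are replaced by their safe_ variants. A minimal before/after sketch of the typing change, using hypothetical names rather than bdfr code:

# Before (typing aliases, deprecated since Python 3.9 by PEP 585):
#   from typing import Callable, Iterable, Optional, Type
#   def pick(handlers: Iterable[Callable[[], None]]) -> Optional[Type[Exception]]: ...

# After (ABCs from collections.abc, built-in generics, Optional retained):
from collections.abc import Callable, Iterable
from typing import Optional


def pick(handlers: Iterable[Callable[[], None]]) -> Optional[type[Exception]]:
    # Run each handler and report the class of the first exception raised, if any.
    for handler in handlers:
        try:
            handler()
        except Exception as exc:
            return type(exc)
    return None
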
8 changes: 5 additions & 3 deletions bdfr/archiver.py
@@ -4,8 +4,10 @@
 import json
 import logging
 import re
+from collections.abc import Iterable, Iterator
+from pathlib import Path
 from time import sleep
-from typing import Iterable, Iterator, Union
+from typing import Union

 import dict2xml
 import praw.models
@@ -108,13 +110,13 @@ def _write_entry_xml(self, entry: BaseArchiveEntry):

     def _write_entry_yaml(self, entry: BaseArchiveEntry):
         resource = Resource(entry.source, "", lambda: None, ".yaml")
-        content = yaml.dump(entry.compile())
+        content = yaml.safe_dump(entry.compile())
         self._write_content_to_disk(resource, content)

     def _write_content_to_disk(self, resource: Resource, content: str):
         file_path = self.file_name_formatter.format_path(resource, self.download_directory)
         file_path.parent.mkdir(exist_ok=True, parents=True)
-        with open(file_path, "w", encoding="utf-8") as file:
+        with Path(file_path).open(mode="w", encoding="utf-8") as file:
             logger.debug(
                 f"Writing entry {resource.source_submission.id} to file in {resource.extension[1:].upper()}"
                 f" format at {file_path}"
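
Two of the archiver changes above alter behaviour slightly rather than just style: yaml.safe_dump emits only standard YAML tags (it raises on arbitrary Python objects instead of serialising them), and the write goes through pathlib's Path.open(). A self-contained sketch of the pattern, with a made-up entry dict standing in for entry.compile():

from pathlib import Path

import yaml

entry = {"id": "abc123", "title": "example post", "score": 42}  # stand-in for entry.compile()

out_path = Path("archive") / "abc123.yaml"  # hypothetical output location
out_path.parent.mkdir(exist_ok=True, parents=True)

content = yaml.safe_dump(entry)  # safe_dump refuses non-standard Python objects

with out_path.open(mode="w", encoding="utf-8") as file:
    file.write(content)
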
2 changes: 1 addition & 1 deletion bdfr/cloner.py
@@ -2,8 +2,8 @@
 # -*- coding: utf-8 -*-

 import logging
+from collections.abc import Iterable
 from time import sleep
-from typing import Iterable

 import prawcore

6 changes: 3 additions & 3 deletions bdfr/completion.py
@@ -23,7 +23,7 @@ def install(self):
                 Path(comp_dir).mkdir(parents=True, exist_ok=True)
             for point in self.entry_points:
                 self.env[f"_{point.upper().replace('-', '_')}_COMPLETE"] = "bash_source"
-                with open(comp_dir + point, "w") as file:
+                with Path(comp_dir + point).open(mode="w") as file:
                     file.write(subprocess.run([point], env=self.env, capture_output=True, text=True).stdout)
                 print(f"Bash completion for {point} written to {comp_dir}{point}")
         if self.shell in ("all", "fish"):
@@ -33,7 +33,7 @@ def install(self):
                 Path(comp_dir).mkdir(parents=True, exist_ok=True)
             for point in self.entry_points:
                 self.env[f"_{point.upper().replace('-', '_')}_COMPLETE"] = "fish_source"
-                with open(comp_dir + point + ".fish", "w") as file:
+                with Path(comp_dir + point + ".fish").open(mode="w") as file:
                     file.write(subprocess.run([point], env=self.env, capture_output=True, text=True).stdout)
                 print(f"Fish completion for {point} written to {comp_dir}{point}.fish")
         if self.shell in ("all", "zsh"):
@@ -43,7 +43,7 @@ def install(self):
                 Path(comp_dir).mkdir(parents=True, exist_ok=True)
             for point in self.entry_points:
                 self.env[f"_{point.upper().replace('-', '_')}_COMPLETE"] = "zsh_source"
-                with open(comp_dir + "_" + point, "w") as file:
+                with Path(comp_dir + "_" + point).open(mode="w") as file:
                     file.write(subprocess.run([point], env=self.env, capture_output=True, text=True).stdout)
                 print(f"Zsh completion for {point} written to {comp_dir}_{point}")

2 changes: 1 addition & 1 deletion bdfr/configuration.py
@@ -79,7 +79,7 @@ def parse_yaml_options(self, file_path: str):
             return
         with yaml_file_loc.open() as file:
             try:
-                opts = yaml.load(file, Loader=yaml.FullLoader)
+                opts = yaml.safe_load(file)
             except yaml.YAMLError as e:
                 logger.error(f"Could not parse YAML options file: {e}")
                 return
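
safe_load is the read-side counterpart of the safe_dump change above: it parses only standard YAML tags and never constructs arbitrary Python objects, which is the recommended default for configuration files. A small sketch with a made-up options file, not a real bdfr config:

from pathlib import Path

import yaml

opts_file = Path("opts_example.yaml")  # hypothetical options file
opts_file.write_text("verbose: true\nsubreddits:\n  - python\n  - linux\n")

with opts_file.open() as file:
    try:
        opts = yaml.safe_load(file)  # standard YAML tags only
    except yaml.YAMLError as e:
        print(f"Could not parse YAML options file: {e}")
        opts = {}

print(opts)  # {'verbose': True, 'subreddits': ['python', 'linux']}
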
6 changes: 3 additions & 3 deletions bdfr/connector.py
@@ -10,11 +10,11 @@
 import shutil
 import socket
 from abc import ABCMeta, abstractmethod
+from collections.abc import Callable, Iterable, Iterator
 from datetime import datetime
 from enum import Enum, auto
 from pathlib import Path
 from time import sleep
-from typing import Callable, Iterable, Iterator

 import appdirs
 import praw
@@ -119,7 +119,7 @@ def read_config(self):
             )
             logger.debug(f"Setting filename restriction scheme to '{self.args.filename_restriction_scheme}'")
         # Update config on disk
-        with open(self.config_location, "w") as file:
+        with Path(self.config_location).open(mode="w") as file:
             self.cfg_parser.write(file)

     def parse_disabled_modules(self):
@@ -143,7 +143,7 @@ def create_reddit_instance(self):
                 )
                 token = oauth2_authenticator.retrieve_new_token()
                 self.cfg_parser["DEFAULT"]["user_token"] = token
-                with open(self.config_location, "w") as file:
+                with Path(self.config_location).open(mode="w") as file:
                     self.cfg_parser.write(file, True)
             token_manager = OAuth2TokenManager(self.cfg_parser, self.config_location)

4 changes: 2 additions & 2 deletions bdfr/downloader.py
@@ -5,11 +5,11 @@
 import logging.handlers
 import os
 import time
+from collections.abc import Iterable
 from datetime import datetime
 from multiprocessing import Pool
 from pathlib import Path
 from time import sleep
-from typing import Iterable

 import praw
 import praw.exceptions
@@ -156,7 +156,7 @@ def _download_submission(self, submission: praw.models.Submission):
     @staticmethod
     def scan_existing_files(directory: Path) -> dict[str, Path]:
         files = []
-        for (dirpath, dirnames, filenames) in os.walk(directory):
+        for (dirpath, _dirnames, filenames) in os.walk(directory):
             files.extend([Path(dirpath, file) for file in filenames])
         logger.info(f"Calculating hashes for {len(files)} files")

2 changes: 1 addition & 1 deletion bdfr/oauth2.py
@@ -103,6 +103,6 @@ def pre_refresh_callback(self, authorizer: praw.reddit.Authorizer):

     def post_refresh_callback(self, authorizer: praw.reddit.Authorizer):
         self.config.set("DEFAULT", "user_token", authorizer.refresh_token)
-        with open(self.config_location, "w") as file:
+        with Path(self.config_location).open(mode="w") as file:
             self.config.write(file, True)
         logger.log(9, f"Written OAuth2 token from authoriser to {self.config_location}")
3 changes: 2 additions & 1 deletion bdfr/resource.py
@@ -6,7 +6,8 @@
 import re
 import time
 import urllib.parse
-from typing import Callable, Optional
+from collections.abc import Callable
+from typing import Optional

 import _hashlib
 import requests
3 changes: 1 addition & 2 deletions bdfr/site_downloaders/download_factory.py
@@ -3,7 +3,6 @@

 import re
 import urllib.parse
-from typing import Type

 from bdfr.exceptions import NotADownloadableLinkError
 from bdfr.site_downloaders.base_downloader import BaseDownloader
@@ -24,7 +23,7 @@

 class DownloadFactory:
     @staticmethod
-    def pull_lever(url: str) -> Type[BaseDownloader]:
+    def pull_lever(url: str) -> type[BaseDownloader]:
         sanitised_url = DownloadFactory.sanitise_url(url)
         if re.match(r"(i\.|m\.)?imgur", sanitised_url):
             return Imgur
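
pull_lever returns a class rather than an instance, so its annotation is the built-in type[BaseDownloader]; typing.Type[BaseDownloader] means the same thing but is deprecated by PEP 585, which also lets the now-unused import go. A stripped-down sketch of the factory idea with placeholder downloader classes (not bdfr's real matching rules):

import re


class BaseDownloader:
    """Placeholder base class."""


class Imgur(BaseDownloader):
    pass


def pull_lever(url: str) -> type[BaseDownloader]:
    # Return the downloader *class* matching the URL; the caller instantiates it.
    if re.match(r"(i\.|m\.)?imgur", url):
        return Imgur
    raise ValueError(f"No downloader for {url!r}")


downloader_class = pull_lever("imgur.com/abc")
downloader = downloader_class()
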
3 changes: 2 additions & 1 deletion bdfr/site_downloaders/erome.py
@@ -3,7 +3,8 @@

 import logging
 import re
-from typing import Callable, Optional
+from collections.abc import Callable
+from typing import Optional

 import bs4
 from praw.models import Submission
3 changes: 2 additions & 1 deletion bdfr/site_downloaders/youtube.py
@@ -3,8 +3,9 @@

 import logging
 import tempfile
+from collections.abc import Callable
 from pathlib import Path
-from typing import Callable, Optional
+from typing import Optional

 import yt_dlp
 from praw.models import Submission
2 changes: 1 addition & 1 deletion tests/test_connector.py
@@ -1,9 +1,9 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-

+from collections.abc import Iterator
 from datetime import datetime, timedelta
 from pathlib import Path
-from typing import Iterator
 from unittest.mock import MagicMock

 import praw
5 changes: 2 additions & 3 deletions tests/test_downloader.py
@@ -1,7 +1,6 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 import logging
-import os
 import re
 from pathlib import Path
 from unittest.mock import MagicMock, patch
@@ -118,12 +117,12 @@ def test_file_creation_date(
     RedditDownloader._download_submission(downloader_mock, submission)

     for file_path in Path(tmp_path).iterdir():
-        file_stats = os.stat(file_path)
+        file_stats = Path(file_path).stat()
         assert file_stats.st_mtime == test_creation_date


 def test_search_existing_files():
-    results = RedditDownloader.scan_existing_files(Path("."))
+    results = RedditDownloader.scan_existing_files(Path())
     assert len(results.keys()) != 0


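
The test tweaks above are equivalences rather than behaviour changes: Path.stat() returns the same os.stat_result as os.stat(), and Path() with no argument is the current directory, equal to Path("."). A tiny sketch, independent of the test suite:

import time
from pathlib import Path

assert Path() == Path(".")  # no-argument Path() is the current directory

tmp = Path("stat_example.txt")  # hypothetical throwaway file
tmp.write_text("hello")

file_stats = tmp.stat()  # same fields as os.stat(): st_size, st_mtime, ...
print(file_stats.st_size)               # 5
print(time.ctime(file_stats.st_mtime))  # modification time

tmp.unlink()  # clean up
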
14 changes: 7 additions & 7 deletions tests/test_file_name_formatter.py
@@ -6,7 +6,7 @@
 import unittest.mock
 from datetime import datetime
 from pathlib import Path
-from typing import Optional, Type, Union
+from typing import Optional, Union
 from unittest.mock import MagicMock

 import praw.models
@@ -214,15 +214,15 @@ def test_format_full_with_index_suffix(


 def test_format_multiple_resources():
     mocks = []
-    for i in range(1, 5):
+    for _i in range(1, 5):
         new_mock = MagicMock()
         new_mock.url = "https://example.com/test.png"
         new_mock.extension = ".png"
         new_mock.source_submission.title = "test"
         new_mock.source_submission.__class__ = praw.models.Submission
         mocks.append(new_mock)
     test_formatter = FileNameFormatter("{TITLE}", "", "ISO")
-    results = test_formatter.format_resource_paths(mocks, Path("."))
+    results = test_formatter.format_resource_paths(mocks, Path())
     results = set([str(res[0].name) for res in results])
     expected = {"test_1.png", "test_2.png", "test_3.png", "test_4.png"}
     assert results == expected
@@ -238,7 +238,7 @@ def test_format_multiple_resources():
     ),
 )
 def test_limit_filename_length(test_filename: str, test_ending: str, test_formatter: FileNameFormatter):
-    result = test_formatter.limit_file_name_length(test_filename, test_ending, Path("."))
+    result = test_formatter.limit_file_name_length(test_filename, test_ending, Path())
     assert len(result.name) <= 255
     assert len(result.name.encode("utf-8")) <= 255
     assert len(str(result)) <= FileNameFormatter.find_max_path_length()
@@ -262,7 +262,7 @@ def test_limit_filename_length(test_filename: str, test_ending: str, test_format
 def test_preserve_id_append_when_shortening(
     test_filename: str, test_ending: str, expected_end: str, test_formatter: FileNameFormatter
 ):
-    result = test_formatter.limit_file_name_length(test_filename, test_ending, Path("."))
+    result = test_formatter.limit_file_name_length(test_filename, test_ending, Path())
     assert len(result.name) <= 255
     assert len(result.name.encode("utf-8")) <= 255
     assert result.name.endswith(expected_end)
@@ -509,13 +509,13 @@ def test_windows_max_path(tmp_path: Path):
 )
 def test_name_submission(
     test_reddit_id: str,
-    test_downloader: Type[BaseDownloader],
+    test_downloader: type[BaseDownloader],
     expected_names: set[str],
     reddit_instance: praw.reddit.Reddit,
 ):
     test_submission = reddit_instance.submission(id=test_reddit_id)
     test_resources = test_downloader(test_submission).find_resources()
     test_formatter = FileNameFormatter("{TITLE}", "", "")
-    results = test_formatter.format_resource_paths(test_resources, Path("."))
+    results = test_formatter.format_resource_paths(test_resources, Path())
     results = set([r[0].name for r in results])
     assert results == expected_names
