Make some automated optimizations using ruff #659

Open · wants to merge 4 commits into base: main
2 changes: 1 addition & 1 deletion github_dependents_info/__init__.py
@@ -5,7 +5,7 @@

from importlib import metadata as importlib_metadata

from .gh_dependents_info import GithubDependentsInfo # noqa
from .gh_dependents_info import GithubDependentsInfo # noqa: F401


def get_version() -> str:
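The only change here narrows the blanket # noqa to the specific F401 code (imported but unused), which matches ruff's PGH004 rule: a bare # noqa silences every check on that line, while # noqa: F401 keeps the others active. A comment-free alternative, sketched below and not part of this PR, is to declare the re-export through __all__, which ruff and pyflakes also treat as a use of the import:

from .gh_dependents_info import GithubDependentsInfo

# hypothetical alternative: listing the name in __all__ marks it as a public re-export,
# so F401 no longer fires and no noqa comment is needed
__all__ = ["GithubDependentsInfo", "get_version"]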
69 changes: 34 additions & 35 deletions github_dependents_info/__main__.py
@@ -18,7 +18,7 @@ def version_callback(print_version: bool) -> None:
"""Print the version of the package."""
if print_version:
console.print(f"[yellow]github-dependents-info[/] version: [bold blue]{version}[/]")
raise typer.Exit()
raise typer.Exit


@app.command(name="")
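Dropping the empty parentheses is ruff's RSE102 fix (unnecessary parentheses on raised exception): raise typer.Exit and raise typer.Exit() are equivalent, because raising a class instantiates it with no arguments. The parentheses return as soon as an argument is needed, as in this hypothetical snippet:

import typer

def finish(failed: bool) -> None:
    if failed:
        raise typer.Exit(code=1)  # parentheses required to pass an exit code
    raise typer.Exit  # equivalent to raise typer.Exit()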
@@ -86,40 +86,39 @@ def main(
# Check minimum arguments
if repo is None:
raise ValueError("--repo argument is mandatory")
else:
# Manage default values :)
if outputrepo is None:
outputrepo = repo
if sort_key is None:
sort_key = "name"
if min_stars is None:
min_stars = 0
# Create GithubDependentsInfo instance
gh_deps_info = GithubDependentsInfo(
repo,
outputrepo=outputrepo,
debug=verbose,
overwrite_progress=overwrite,
sort_key=sort_key,
min_stars=min_stars,
json_output=json_output,
csv_directory=csv_directory,
badge_markdown_file=badge_markdown_file,
doc_url=doc_url,
markdown_file=markdown_file,
badge_color=badge_color,
merge_packages=merge_packages,
)
# Collect data
gh_deps_info.collect()
# Write output markdown
if markdown_file is not None:
gh_deps_info.build_markdown(file=markdown_file)
# Update existing markdown to add badge
if badge_markdown_file is not None:
gh_deps_info.write_badge(badge_markdown_file, "total_doc_url")
# Print text or json result
gh_deps_info.print_result()
# Manage default values :)
if outputrepo is None:
outputrepo = repo
if sort_key is None:
sort_key = "name"
if min_stars is None:
min_stars = 0
# Create GithubDependentsInfo instance
gh_deps_info = GithubDependentsInfo(
repo,
outputrepo=outputrepo,
debug=verbose,
overwrite_progress=overwrite,
sort_key=sort_key,
min_stars=min_stars,
json_output=json_output,
csv_directory=csv_directory,
badge_markdown_file=badge_markdown_file,
doc_url=doc_url,
markdown_file=markdown_file,
badge_color=badge_color,
merge_packages=merge_packages,
)
# Collect data
gh_deps_info.collect()
# Write output markdown
if markdown_file is not None:
gh_deps_info.build_markdown(file=markdown_file)
# Update existing markdown to add badge
if badge_markdown_file is not None:
gh_deps_info.write_badge(badge_markdown_file, "total_doc_url")
# Print text or json result
gh_deps_info.print_result()


if __name__ == "__main__":
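The bulk of this hunk is one mechanical change: because the if branch raises unconditionally, the else: wrapper is superfluous (ruff RET506) and its body simply moves out one indentation level; the statements themselves are unchanged. A minimal sketch of the same transformation on a hypothetical helper:

# before: the else is redundant because the if branch always raises
def require_repo_before(repo):
    if repo is None:
        raise ValueError("--repo argument is mandatory")
    else:
        return repo

# after: identical behaviour, one indentation level less
def require_repo_after(repo):
    if repo is None:
        raise ValueError("--repo argument is mandatory")
    return repo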
55 changes: 20 additions & 35 deletions github_dependents_info/gh_dependents_info.py
@@ -15,27 +15,21 @@
class GithubDependentsInfo:
def __init__(self, repo, **options) -> None:
self.repo = repo
self.outputrepo = self.repo if "outputrepo" not in options else options["outputrepo"]
if self.outputrepo is None or self.outputrepo == "" or len(self.outputrepo) < 4:
self.outputrepo = options.get("outputrepo", self.repo)
if len(self.outputrepo or "") < 4:
self.outputrepo = self.repo
self.url_init = f"https://github.com/{self.repo}/network/dependents"
self.url_starts_with = f"/{self.repo}/network/dependents" + "?package_id="
self.sort_key = "name" if "sort_key" not in options else options["sort_key"]
self.min_stars = None if "min_stars" not in options else options["min_stars"]
self.json_output = True if "json_output" in options and options["json_output"] is True else False
self.merge_packages = True if "merge_packages" in options and options["merge_packages"] is True else False
self.doc_url = options["doc_url"] if "doc_url" in options else None
self.markdown_file = options["markdown_file"] if "markdown_file" in options else None
self.badge_color = options["badge_color"] if "badge_color" in options else "informational"
self.debug = True if "debug" in options and options["debug"] is True else False
self.overwrite_progress = (
True if "overwrite_progress" in options and options["overwrite_progress"] is True else False
)
self.csv_directory = (
Path(options["csv_directory"])
if ("csv_directory" in options and options["csv_directory"] is not None)
else None
)
self.url_starts_with = f"/{self.repo}/network/dependents?package_id="
self.sort_key = options.get("sort_key", "name")
self.min_stars = options.get("min_stars")
self.json_output = bool(options.get("json_output"))
self.merge_packages = bool(options.get("merge_packages"))
self.doc_url = options.get("doc_url")
self.markdown_file = options.get("markdown_file")
self.badge_color = options.get("badge_color", "informational")
self.debug = bool(options.get("debug"))
self.overwrite_progress = bool(options.get("overwrite_progress"))
self.csv_directory = Path(options.get("csv_directory", "."))
self.total_sum = 0
self.total_public_sum = 0
self.total_private_sum = 0
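Most of the constructor rewrite swaps the verbose `key in options` checks for dict.get() and bool(), which behave the same for the values the CLI actually passes. The csv_directory line is arguably more than a style fix: the original kept None when the option was absent or explicitly None, whereas Path(options.get("csv_directory", ".")) now falls back to the current directory (and would raise a TypeError on an explicit None). A behaviour-preserving variant would look like this sketch, written as a hypothetical helper rather than the PR's code:

from pathlib import Path
from typing import Optional

def resolve_csv_directory(options: dict) -> Optional[Path]:
    # keep None when the option is absent or None, as the original constructor did
    csv_directory = options.get("csv_directory")
    return Path(csv_directory) if csv_directory is not None else None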
@@ -129,10 +123,7 @@ def collect(self):
logging.info(" - browsing page " + str(page_number))

# Manage results for package
if self.sort_key == "stars":
result = sorted(result, key=lambda d: d[self.sort_key], reverse=True)
else:
result = sorted(result, key=lambda d: d[self.sort_key])
result = sorted(result, key=lambda d: d[self.sort_key], reverse=self.sort_key == "stars")
if self.debug is True:
for r in result:
logging.info(r)
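Folding the two sorted() calls into one works because reverse accepts any boolean expression, and descending order is only wanted when sorting by stars. A small standalone sketch of the same idiom:

dependents = [{"name": "b", "stars": 5}, {"name": "a", "stars": 9}]

def sort_dependents(items, sort_key):
    # reverse is True only for the stars key, matching the original if/else
    return sorted(items, key=lambda d: d[sort_key], reverse=sort_key == "stars")

print(sort_dependents(dependents, "name"))   # ascending by name
print(sort_dependents(dependents, "stars"))  # descending by star count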
@@ -235,12 +226,12 @@ def save_progress(self, package):
pd.json_normalize(source_info).to_csv(file_path_sources, mode="w", header=True)
else:
sources_all_df = pd.read_csv(file_path_sources, index_col=0)
if package["name"] in sources_all_df["name"].values:
if package["name"] in sources_all_df["name"].to_numpy():
# update the row with the new information
sources_all_df.set_index("name", inplace=True)
sources_all_df = sources_all_df.set_index("name")
source_df = pd.json_normalize(source_info).set_index("name", drop=True)
sources_all_df.update(source_df)
sources_all_df.reset_index(inplace=True, drop=False)
sources_all_df = sources_all_df.reset_index(drop=False)
sources_all_df.to_csv(file_path_sources, mode="w", header=True)
else:
pd.json_normalize(source_info).to_csv(file_path_sources, mode="a", header=False)
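The pandas tweaks follow the pandas-vet rules bundled with ruff: PD011 prefers Series.to_numpy() over the ambiguous .values attribute, and PD002 discourages inplace=True in favour of reassigning the result; behaviour is unchanged in both cases. A rough standalone sketch of the same pattern on hypothetical data:

import pandas as pd

sources_all_df = pd.DataFrame({"name": ["pkg-a", "pkg-b"], "repo_count": [1, 2]})

# PD011: membership test against a plain numpy array instead of .values
if "pkg-a" in sources_all_df["name"].to_numpy():
    # PD002: reassign instead of mutating in place
    sources_all_df = sources_all_df.set_index("name")
    sources_all_df = sources_all_df.reset_index(drop=False)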
@@ -411,12 +402,9 @@ def build_repo_md_line(self, md_lines, repo1):
md_lines += [f"|{image_md} &nbsp; [{repo_label}](https://github.com/{repo_label}) | {repo_stars} |"]

def build_badge(self, label, nb, **options):
if "url" in options:
url = options["url"]
else:
url = f"https://github.com/{self.repo}/network/dependents"
url = options.get("url", f"https://github.com/{self.repo}/network/dependents")
return (
f"[![Generated by github-dependents-info](https://img.shields.io/static/v1?label={label}&message={str(nb)}"
f"[![Generated by github-dependents-info](https://img.shields.io/static/v1?label={label}&message={nb!s}"
+ f"&color={self.badge_color}&logo=slickpic)]({url})"
)
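The {nb!s} replacement field is ruff's RUF010 fix: the !s conversion applies str() to the value inside the f-string, so the explicit str(nb) call is redundant. For example:

nb = 42
# the !s conversion and an explicit str() call render identically
assert f"message={str(nb)}" == f"message={nb!s}" == "message=42"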

@@ -461,10 +449,7 @@ def replace_in_file(self, file_path, start, end, content, add_new_line=False):
print(f"[Warning] Can not update badge if it does not contain tags {start} and {end}")
return
# Replace the target string
if add_new_line is True:
replacement = f"{start}\n{content}\n{end}"
else:
replacement = f"{start}\n{content}{end}"
replacement = f"{start}\n{content}\n{end}" if add_new_line is True else f"{start}\n{content}{end}"
regex = rf"{start}([\s\S]*?){end}"
file_content = re.sub(regex, replacement, file_content, re.DOTALL)
# Write the file out again
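The conditional assignment is ruff's SIM108 rewrite (ternary instead of an if/else block) and does not change behaviour. One pre-existing detail, not touched by this PR: the re.sub call here passes re.DOTALL in the count position rather than as flags=, so the flag is never applied; the [\s\S] pattern already matches newlines, so the result is the same in practice, but the keyword form states the intent. A hedged sketch with hypothetical marker strings:

import re

def replace_between(file_content: str, start: str, end: str, content: str, add_new_line: bool = False) -> str:
    # SIM108: conditional expression instead of an if/else block, as in the PR
    replacement = f"{start}\n{content}\n{end}" if add_new_line else f"{start}\n{content}{end}"
    regex = rf"{start}([\s\S]*?){end}"
    # pass DOTALL as a keyword so it is read as a flag, not as the count limit
    return re.sub(regex, replacement, file_content, flags=re.DOTALL)

print(replace_between("<!-- a -->old<!-- b -->", "<!-- a -->", "<!-- b -->", "new"))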