commit 9cacabd0bf91d8b8199f48043d61ea5c331ee608
parent b5cad008ea437b626c4fd1d79ab599b17c749061
Author: Vetle Haflan <vetle@haflan.dev>
Date: Sat, 28 Dec 2019 23:13:22 +0100
Add some more or less useful python scripts
Diffstat:
A | BRenamer.py | | | 45 | +++++++++++++++++++++++++++++++++++++++++++++ |
A | finddups.py | | | 39 | +++++++++++++++++++++++++++++++++++++++ |
A | gitlab-issue-bot.py | | | 132 | +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ |
3 files changed, 216 insertions(+), 0 deletions(-)
diff --git a/BRenamer.py b/BRenamer.py
@@ -0,0 +1,45 @@
#!/bin/python

# Bulk renamer, currently in a pretty specific format.
# Continue building on this to make a general bulk renamer.
#
# Usage: BRenamer.py [INPUT_REGEX OUTPUT_TEMPLATE]
# INPUT_REGEX must contain one capturing group (the "unique part");
# every "{}" in OUTPUT_TEMPLATE is replaced with that group.

import sys
import os
import re

cwd = os.getcwd()
filelist = os.listdir(cwd)
filelist.sort()
if len(sys.argv) > 2:
    regin = sys.argv[1]
    regout = sys.argv[2]
else:
    regin = input("Enter regex for input files: ")
    regout = input("Enter common part for output files: ")
    # "{}" is replaced with the unique part
    # (the original comment said "()", but the code substitutes "{}")

rin = re.compile(regin)

matches = []
new_names = []
for filename in filelist:
    match = rin.search(filename)
    if match:
        # Group 1 contains the unique part of the original filename.
        # Place this correctly in the new filename:
        print(match.group(1))
        new_nm = regout.replace("{}", match.group(1))
        matches.append(filename)
        new_names.append(new_nm)

# Preview every rename before asking for confirmation.
for old, new in zip(matches, new_names):
    print("\t{} -> {}".format(old, new))

reprompt = input("\nRename the above files? [y/n]: ")
if reprompt == 'y':
    print("renamin'")
    # os.rename is portable and avoids spawning one `mv` subprocess per
    # file, which the original did via subprocess.call.
    for old, new in zip(matches, new_names):
        os.rename(os.path.join(cwd, old), os.path.join(cwd, new))
diff --git a/finddups.py b/finddups.py
@@ -0,0 +1,39 @@
+#!/usr/bin/python3
+
+# TODO: Allow giving multiple directories as arguments and check them all
+# TODO: Discover directories with no unique files
+# TODO: Make interactive deletion functionality?
+
+import os
+from hashlib import sha256
+
filefilter = ".jpg"  # TODO: Make this an argument, use regex ++

# Maps sha256 hex digest -> list of absolute paths whose content has that hash.
hash_to_file_list = {}

def get_file_hash(fi):
    """Return the sha256 hex digest of the file at path *fi*.

    Reads the file in fixed-size chunks so arbitrarily large files do not
    need to fit in memory at once (the original read the whole file in a
    single f.read() call).
    """
    h = sha256()
    with open(fi, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 16), b""):
            h.update(chunk)
    return h.hexdigest()
+
### Generate hash for all files
for rootdir, _, files in os.walk("."):
    for fi in files:
        if filefilter not in fi:
            continue
        fpath = os.path.join(rootdir, fi)
        try:
            fhash = get_file_hash(fpath)
        except OSError as e:
            # Report and keep going: the original try wrapped the whole
            # per-directory loop, so one unreadable file silently skipped
            # every remaining file in that directory.
            print(e)
            continue
        hash_to_file_list.setdefault(fhash, []).append(os.path.abspath(fpath))

### Find all lists with two or more elements (i.e. duplicate files)
dls = [copies for copies in hash_to_file_list.values() if len(copies) >= 2]
if dls:
    print("The following files are copies of each other:")
    for duplist in dls:
        print(" = ".join(duplist))
else:
    print("No duplicates were found :)")
diff --git a/gitlab-issue-bot.py b/gitlab-issue-bot.py
@@ -0,0 +1,132 @@
#!/usr/bin/python3
from datetime import datetime as dt
import requests
import json
import sys
from os.path import join, expanduser

DEBUG_MODE = False
# Number of days that issues can be inactive before being considered stale,
# depending on the issue label
LABELS_EXPIRY_DAYS = {
    "Doing": 14,
    "Current sprint": 30,
    "Next sprint": 90,
    }

STALE_LABEL = "Stale"
ISSUE_NOTE_STALE = "Labeled `Stale`, as issue hasn't been updated in a long time"
LOGMSG_STALE_ISSUE = "Issue \u001b[1m{}\u001b[0m with label '{}' is more than {} days old. Marking as stale..."

# API URLS
URL_BASE = "https://gl.haflan.dev/api/v4"
URL_ISSUES = URL_BASE + "/issues?state=opened&per_page=100"
URL_PROJECTS = URL_BASE + "/projects"
URL_ISSUE_NOTES = URL_BASE + "/projects/{}/issues/{}/notes?sort=desc&order_by=updated_at&per_page=1"

URL_ISSUE_PUT = URL_BASE + "/projects/{}/issues/{}" # format(project_id, issue_iid)
URL_NOTE_POST = URL_BASE + "/projects/{}/issues/{}/notes" # format(project_id, issue_iid)

# Load token from file; an alternative token file may be given as the
# (single) command-line argument.
TOKENFILE = join(expanduser("~"), ".secrets/gitlab-token")
# BUGFIX: the original tested `len(sys.argv) > 2`, which required a second
# dummy argument before sys.argv[1] was ever consulted.
if len(sys.argv) > 1:
    TOKENFILE = sys.argv[1]

# Use a context manager so the token file handle is closed deterministically
# (the original open() leaked it).
with open(TOKENFILE) as tokenfile:
    TOKEN = tokenfile.readline().strip()
TOKENHEADER = {"PRIVATE-TOKEN": TOKEN}
+
def gitget(URL):
    """GET *URL* with the private-token header and return the parsed JSON body."""
    response = requests.get(URL, headers=TOKENHEADER)
    return response.json()
+
def gitput(URL, json_data):
    """Unimplemented stub: accepts a URL and a payload but performs no
    request and returns None.

    NOTE(review): mark_as_stale() below calls requests.put directly rather
    than going through this helper — implement this function or remove it.
    """
    return
+
# Not sure if this covers all kinds of activity. Check thoroughly
def is_stale(issue, days_before_stale):
    """Return True when the issue's most recent activity is older than
    *days_before_stale* days.

    The newest note (if any) takes precedence over the issue's own
    updated_at timestamp. Comparison is at day granularity; NOTE(review):
    compares the server's timestamp against the naive local clock —
    assumed close enough for day-level staleness, but confirm timezones.
    """
    last_active = issue["updated_at"]
    notes = gitget(URL_ISSUE_NOTES.format(issue["project_id"], issue["iid"]))
    if notes:
        last_active = notes[0]["updated_at"]
    day_part = last_active.split("T")[0]  # only Y-m-d needed
    age = dt.now() - dt.strptime(day_part, "%Y-%m-%d")
    return age.days > days_before_stale
+
def find_relevant_label(issue):
    """Return the first of the issue's labels that has a staleness expiry
    configured in LABELS_EXPIRY_DAYS, or None when no label is tracked."""
    tracked = (label for label in issue["labels"] if label in LABELS_EXPIRY_DAYS)
    return next(tracked, None)
+
def mark_as_stale(issue, old_label):
    """Swap *old_label* for the Stale label on *issue* and leave an
    explanatory note.

    Issues one PUT (labels update) and one POST (note); both requests are
    skipped when DEBUG_MODE is set.
    """
    issue["labels"].remove(old_label)
    # Stale shouldn't already be present, but just in case:
    if STALE_LABEL not in issue["labels"]:
        issue["labels"].append(STALE_LABEL)
    updated_issue = {
        "id": issue["id"],
        "iid": issue["iid"],
        # labels is a comma-separated list, not an array
        "labels": ",".join(issue["labels"])
    }
    issue_url = URL_ISSUE_PUT.format(issue["project_id"], issue["iid"])
    # BUGFIX: post the note to URL_NOTE_POST, defined above for exactly
    # this purpose. The original formatted URL_ISSUE_NOTES, whose listing
    # query string (?sort=desc&order_by=updated_at&per_page=1) does not
    # belong on a POST.
    note_url = URL_NOTE_POST.format(issue["project_id"], issue["iid"])
    information_note = {"body": ISSUE_NOTE_STALE}
    if not DEBUG_MODE:
        requests.put(issue_url, headers=TOKENHEADER, data=updated_issue)
        requests.post(note_url, headers=TOKENHEADER, data=information_note)
+
if __name__ == "__main__":
    # Project id -> name mapping, used only to build readable log lines.
    project_id_name = {p["id"]: p["name"] for p in gitget(URL_PROJECTS)}
    # Fetch all open issues (first 100 — per_page limit) and check each.
    for issue in gitget(URL_ISSUES):
        issue_name = "{}#{}".format(project_id_name[issue["project_id"]],
                                    issue["iid"])
        label = find_relevant_label(issue)
        if not label:
            continue
        expiry_days = LABELS_EXPIRY_DAYS[label]
        if is_stale(issue, expiry_days):
            print(LOGMSG_STALE_ISSUE.format(issue_name, label, expiry_days))
            mark_as_stale(issue, label)
+
+"""
+PROJECT_URL_ISSUES = URL_BASE + "/projects/{}/issues?state=opened&per_page=100"
+
+# Like groups: No reason to go via project names either
+project_id_name = {}
+project_name_id = {}
+for project in gitget(URL_PROJECTS):
+ if project["id"] not in project_id_name:
+ project_id_name[project["id"]] = project["name"]
+ project_name_id[project["name"]] = project["id"]
+
+for pid in project_id_name:
+ #print("\u001b[1m" + project_id_name[pid] + ":\u001b[0m")
+ for issue in gitget(PROJECT_URL_ISSUES.format(pid)):
+ if has_relevant_label(issue):
+ check_expiry(issue)
+
+"""
+
+"""
+GROUPS_URL = URL_BASE + "/groups"
+# Projects are addressed by their ID, so group is not that important
+groups = [ {"id": group["id"], "name": group["name"]} for group in gitget(GROUPS_URL)]
+for group in groups:
+ print("\u001b[1m" + group["name"] + ":\u001b[0m ", end="")
+ print(len(gitget(GROUPS_URL + "/" + str(group["id"]) + "/issues?per_page=100")))
+"""
+
+"""
+for issue in all_open_issues:
+ print()
+ project = project_id_name[int(issue["project_id"])]
+ issue_str_id = project + "#" + str(issue["iid"])
+ issue_string_ids[issue_str_id] = issue
+ print(issue_str_id + ": \u001b[1m" + issue["title"] + "\u001b[0m")
+ print(issue["description"])
+ prompt = input()
+ if prompt == "exit":
+ exit()
+"""