# prcop/__init__.py
from .checker import check

__version__ = "0.1.0"

__all__ = ["check"]


# prcop/alerts.py
class ReviewOverdueAlert:
    def __init__(self, pr):
        self.pr = pr

    def __str__(self):
        opened = int(self.pr.business_hours_since_opened.total_seconds() / 3600)
        reviews_str = "review" if self.pr.reviews_remaining == 1 else "reviews"
        return (
            f'Pull request "{self.pr.title}" (opened {opened} business hours ago) '
            f"needs {self.pr.reviews_remaining} more {reviews_str}:\n"
            f"{self.pr.url}"
        )


# prcop/business_hours.py
from datetime import datetime, time, timedelta

START_OF_DAY = time(9)
END_OF_DAY = time(17)
SAT = 5
SUN = 6


def within_business_hours(dt):
    return dt.weekday() < SAT and START_OF_DAY <= dt.time() <= END_OF_DAY


def business_hours_between_dates(start, end):
    # Clamp the start forward to the next business-hours boundary.
    if start.time() < START_OF_DAY:
        start = datetime.combine(start.date(), START_OF_DAY)
    elif start.time() >= END_OF_DAY:
        start = datetime.combine(start.date() + timedelta(days=1), START_OF_DAY)
    if start.weekday() in (SAT, SUN):
        # A weekend start moves forward to Monday 09:00.
        start = datetime.combine(start + timedelta(days=7 - start.weekday()), START_OF_DAY)

    # Clamp the end backward to the previous business-hours boundary.
    if end.time() <= START_OF_DAY:
        end = datetime.combine(end.date() - timedelta(days=1), END_OF_DAY)
    elif end.time() > END_OF_DAY:
        end = datetime.combine(end.date(), END_OF_DAY)
    if end.weekday() in (SAT, SUN):
        # A weekend end moves back to Friday 17:00.
        end = datetime.combine(end - timedelta(days=end.weekday() - 4), END_OF_DAY)

    if end <= start:
        return timedelta(0)

    # Every day before the last contributes its out-of-hours time:
    # 24h on weekends, 16h (24 - 17 + 9) on weekdays.
    outside_business_hours = 0
    d = start.date()
    while d < end.date():
        outside_business_hours += 24 if d.weekday() in (SAT, SUN) else 24 - 17 + 9
        d += timedelta(days=1)
    return end - start - timedelta(hours=outside_business_hours)


# prcop/checker.py
import json
import logging
from datetime import datetime, timedelta
from pathlib import Path

from .alerts import ReviewOverdueAlert
from .business_hours import business_hours_between_dates, within_business_hours
from .config import Config
from .http_client import HttpClient

logger = logging.getLogger(__name__)


class PullRequest:
    # Alert once a pull request has been open at least this long (in business hours)...
    _MIN_TIME_OPENED = timedelta(hours=3)
    # ...and still has fewer than this many approvals.
    _MIN_APPROVALS = 2

    def __init__(self, data, *, repo, record):
        self._data = data
        self._repo = repo
        self._record = record

    def alerts(self):
        logger.debug(
            "%s business hours since opened: %s",
            self._repo.full_slug,
            self.business_hours_since_opened,
        )
        logger.debug("%s recently alerted: %s", self._repo.full_slug, self._recently_alerted)
        logger.debug("%s reviews remaining: %s", self._repo.full_slug, self.reviews_remaining)
        logger.debug("%s needs work: %s", self._repo.full_slug, self._needs_work)
        if (
            self.business_hours_since_opened >= self._MIN_TIME_OPENED
            and not self._recently_alerted
            and self.reviews_remaining
            and not self._needs_work
        ):
            self._record.record_alert(self._id)
            return [ReviewOverdueAlert(self)]
        return []

    @property
    def reviews_remaining(self):
        return max(self._MIN_APPROVALS - self._approvals, 0)

    @property
    def _id(self):
        return str(self._data["id"])

    @property
    def title(self):
        return self._data["title"]

    @property
    def _recently_alerted(self):
        return self._record.alerted_recently(self._id)

    @property
    def _approvals(self):
        return sum(review["status"] == "APPROVED" for review in self._data["reviewers"])

    @property
    def _needs_work(self):
        return any(review["status"] == "NEEDS_WORK" for review in self._data["reviewers"])

    @property
    def business_hours_since_opened(self):
        # Bitbucket reports createdDate in milliseconds since the epoch.
        return business_hours_between_dates(
            datetime.fromtimestamp(self._data["createdDate"] / 1000), datetime.now()
        )

    @property
    def url(self):
        return (
            f"{self._repo.base_url}/projects/{self._repo.project_slug}/"
f"repos/{self._repo.slug}/pull-requests/{self._id}/" ) class Repo: def __init__(self, base_url, project, repo, *, record, http): self.base_url = base_url self.project_slug = project self.slug = repo self._record = record self._http = http def alerts(self): url = ( f"{self.base_url}/rest/api/1.0/projects/{self.project_slug}" f"/repos/{self.slug}/pull-requests" ) api_response = self._http.get(url) alerts = [] for pr_data in api_response["values"]: pr = PullRequest(pr_data, repo=self, record=self._record) alerts += pr.alerts() return alerts @property def full_slug(self): return f"{self.project_slug}/{self.slug}" class Checker: def __init__(self, *, url, record, http): self._base_url = url self._record = record self._http = http def check(self, project, repo): if not within_business_hours(datetime.now()): logger.info("skipping check: outside of business hours") return [] repo = Repo(self._base_url, project, repo, record=self._record, http=self._http) return repo.alerts() class JsonRecord: def __init__(self, *, database): self._db_path = Path(database) def record_alert(self, pr_id): db = self._read_db() db[pr_id] = datetime.now().isoformat() self._db_path.write_text(json.dumps(db)) def alerted_recently(self, pr_id): db = self._read_db() if pr_id not in db: return False return datetime.now() - datetime.fromisoformat(db[pr_id]) < timedelta(hours=3) def _read_db(self): try: return json.loads(self._db_path.read_text()) except FileNotFoundError: return {} def check(url, repos, *, reporter, config=Config()): http = HttpClient(verify_https=config.verify_https) record = JsonRecord(database=config.database) checker = Checker(url=url, record=record, http=http) alerts = [] for repo in repos: logger.info(f"checking repo: {repo}") project_slug, repo_slug = repo.split("/") alerts.extend(checker.check(project_slug, repo_slug)) reporter.report(alerts) PK!4nF prcop/cli.pyimport logging import click from .checker import check from .config import Config from .reporters import SlackReporter logging.basicConfig(level="WARNING", format="%(asctime)s [%(levelname)s] %(name)s %(message)s") @click.group() def cli(): pass @cli.command() @click.option("--bitbucket-url", required=True) @click.option("--slack-webhook", required=True) @click.option("--slack-channel", required=True) @click.option("--database") @click.option("--no-verify-https", is_flag=True) @click.option("-v", "--verbose", count=True) @click.argument("repos", nargs=-1, metavar="[REPO...]", required=True) def run(bitbucket_url, slack_webhook, slack_channel, repos, database, no_verify_https, verbose): if verbose: logging.getLogger().setLevel("DEBUG" if verbose > 1 else "INFO") config = Config(verify_https=not no_verify_https) if database: config.database = database reporter = SlackReporter(url=slack_webhook, channel=slack_channel) check(bitbucket_url, list(repos), reporter=reporter, config=config) PK!Oprcop/config.pyfrom dataclasses import dataclass @dataclass class Config: database: str = "/tmp/prcopdb.json" verify_https: bool = True PK!"Nprcop/http_client.pyimport requests import urllib3 class HttpClient: def __init__(self, *, verify_https=True): self._session = requests.Session() if not verify_https: self._session.verify = False urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning) def get(self, url): return self._session.get(url).json() def post(self, url, payload={}): self._session.post(url, json=payload) PK!Ԡwwprcop/reporters.pyfrom .http_client import HttpClient class SlackReporter: def __init__(self, *, url, channel): self._url = url 
        self._channel = channel
        self._http = HttpClient()

    def report(self, alerts):
        for alert in alerts:
            payload = {"channel": self._channel, "text": str(alert)}
            self._http.post(self._url, payload=payload)
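
The business-hours clamping in prcop/business_hours.py is easy to get wrong, so here is a small sanity-check sketch against hand-computed values. The dates are arbitrary and the snippet is illustrative only, not part of the package.

# sanity-check sketch for prcop.business_hours (illustrative, not shipped with the package)
from datetime import datetime, timedelta

from prcop.business_hours import business_hours_between_dates, within_business_hours

# Friday 16:00 -> Monday 10:00: one business hour on Friday plus one on Monday.
assert business_hours_between_dates(
    datetime(2024, 1, 5, 16), datetime(2024, 1, 8, 10)
) == timedelta(hours=2)

# A window that opens and closes over the weekend contains no business hours.
assert business_hours_between_dates(
    datetime(2024, 1, 6, 12), datetime(2024, 1, 7, 12)
) == timedelta(0)

# Saturday midday is outside business hours; this is the same gate Checker.check()
# uses to skip runs that fall outside the working day.
assert not within_business_hours(datetime(2024, 1, 6, 12))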
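
For completeness, a minimal sketch of driving the package-level check() entry point directly rather than through the cli run command, which wires up the same call with a SlackReporter. The Bitbucket URL, repo slug, and PrintReporter class below are hypothetical stand-ins; any object exposing report(alerts) satisfies what check() expects.

# usage sketch (illustrative; the URL, repo slug, and PrintReporter are made up)
from prcop import check
from prcop.config import Config


class PrintReporter:
    # Stand-in for SlackReporter: check() only ever calls reporter.report(alerts).
    def report(self, alerts):
        for alert in alerts:
            print(alert)
            print("---")


check(
    "https://bitbucket.example.com",  # base URL of the Bitbucket Server instance
    ["PROJ/my-repo"],                 # repositories as "PROJECT/repository" slugs
    reporter=PrintReporter(),
    config=Config(database="/tmp/prcop-example.json"),
)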