prcop/__init__.py

from .checker import check

__version__ = "0.3.0"
__all__ = ["check"]


prcop/alerts.py

class ReviewOverdueAlert:
    def __init__(self, pr):
        self.pr = pr

    def __str__(self):
        hours_since_updated = int(self.pr.business_hours_since_updated.total_seconds() / 3600)
        reviews_str = "review" if self.pr.reviews_remaining == 1 else "reviews"
        return (
            f'Pull request "{self.pr.title}" '
            f"(last updated {hours_since_updated} business hours ago) "
            f"needs {self.pr.reviews_remaining} more {reviews_str}:\n"
            f"{self.pr.url}"
        )


prcop/business_hours.py

from datetime import datetime, time, timedelta

START_OF_DAY = time(9)
END_OF_DAY = time(17)
SAT = 5
SUN = 6


def within_business_hours(dt):
    return dt.weekday() < SAT and START_OF_DAY <= dt.time() <= END_OF_DAY


def business_hours_between_dates(start, end):
    # Clamp the start forward to the next business-hours boundary.
    if start.time() < START_OF_DAY:
        start = datetime.combine(start.date(), START_OF_DAY)
    elif start.time() >= END_OF_DAY:
        start = datetime.combine(start.date() + timedelta(days=1), START_OF_DAY)
    if start.weekday() in (SAT, SUN):
        # A weekend start rolls forward to Monday morning.
        start = datetime.combine(start.date() + timedelta(days=7 - start.weekday()), START_OF_DAY)

    # Clamp the end backward to the previous business-hours boundary.
    if end.time() <= START_OF_DAY:
        end = datetime.combine(end.date() - timedelta(days=1), END_OF_DAY)
    elif end.time() > END_OF_DAY:
        end = datetime.combine(end.date(), END_OF_DAY)
    if end.weekday() in (SAT, SUN):
        # A weekend end rolls back to Friday evening.
        end = datetime.combine(end.date() - timedelta(days=end.weekday() - 4), END_OF_DAY)

    if end <= start:
        return timedelta(0)

    # Subtract the non-business hours of every full day in between:
    # all 24 hours on weekends, the 16 hours outside 09:00-17:00 on weekdays.
    outside_business_hours = 0
    d = start.date()
    while d < end.date():
        outside_business_hours += 24 if d.weekday() in (SAT, SUN) else 24 - 17 + 9
        d += timedelta(days=1)
    return end - start - timedelta(hours=outside_business_hours)
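A quick sanity check of the business-hours helper above. This snippet is illustrative only and not part of the package; it assumes prcop is importable and uses arbitrary 2019 dates (7 June 2019 is a Friday, 10 June a Monday):

from datetime import datetime, timedelta

from prcop.business_hours import business_hours_between_dates, within_business_hours

# Friday 16:00 -> Monday 10:00 spans one business hour on Friday (16:00-17:00)
# and one on Monday (09:00-10:00); the weekend contributes nothing.
elapsed = business_hours_between_dates(datetime(2019, 6, 7, 16), datetime(2019, 6, 10, 10))
assert elapsed == timedelta(hours=2)

# Saturday is never within business hours, regardless of the time of day.
assert not within_business_hours(datetime(2019, 6, 8, 11))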
prcop/checker.py

import json
import logging
from datetime import datetime, timedelta
from json.decoder import JSONDecodeError
from pathlib import Path

from .alerts import ReviewOverdueAlert
from .business_hours import business_hours_between_dates, within_business_hours
from .config import Config
from .exceptions import FailedToGetData
from .http_client import HttpClient

logger = logging.getLogger(__name__)


class PullRequest:
    """One pull request as returned by the Bitbucket REST API."""

    _MIN_TIME_SINCE_UPDATED = timedelta(hours=3)
    _MIN_APPROVALS = 2

    def __init__(self, data, *, repo, record):
        self._data = data
        self._repo = repo
        self._record = record

    def alerts(self):
        id_str = f"{self._repo.full_slug}#{self._id}"
        logger.debug(f"{id_str} business hours since updated: {self.business_hours_since_updated}")
        logger.debug(f"{id_str} recently alerted: {self._recently_alerted}")
        logger.debug(f"{id_str} reviews remaining: {self.reviews_remaining}")
        logger.debug(f"{id_str} needs work: {self._needs_work}")
        # Alert only if the PR has sat untouched for long enough, still needs
        # reviews, is not flagged as needing work, and was not alerted on recently.
        if (
            self.business_hours_since_updated >= self._MIN_TIME_SINCE_UPDATED
            and not self._recently_alerted
            and self.reviews_remaining
            and not self._needs_work
        ):
            self._record.record_alert(self._id)
            return [ReviewOverdueAlert(self)]
        return []

    @property
    def reviews_remaining(self):
        return max(self._MIN_APPROVALS - self._approvals, 0)

    @property
    def _id(self):
        return str(self._data["id"])

    @property
    def title(self):
        return self._data["title"]

    @property
    def _recently_alerted(self):
        return self._record.alerted_recently(self._id)

    @property
    def _approvals(self):
        return sum(review["status"] == "APPROVED" for review in self._data["reviewers"])

    @property
    def _needs_work(self):
        return any(review["status"] == "NEEDS_WORK" for review in self._data["reviewers"])

    @property
    def business_hours_since_updated(self):
        return business_hours_between_dates(
            datetime.fromtimestamp(self._data["updatedDate"] / 1000), datetime.now()
        )

    @property
    def url(self):
        return (
            f"{self._repo.base_url}/projects/{self._repo.project_slug}/"
            f"repos/{self._repo.slug}/pull-requests/{self._id}/"
        )


class Repo:
    def __init__(self, base_url, project, repo, *, record, http):
        self.base_url = base_url
        self.project_slug = project
        self.slug = repo
        self._record = record
        self._http = http

    def alerts(self):
        url = (
            f"{self.base_url}/rest/api/1.0/projects/{self.project_slug}"
            f"/repos/{self.slug}/pull-requests"
        )
        api_response = self._http.get(url)
        try:
            prs = api_response.json()["values"]
        except (JSONDecodeError, KeyError):
            raise FailedToGetData(
                f"{self.full_slug} failed to return pr data: {api_response.text}"
            )
        alerts = []
        for pr_data in prs:
            pr = PullRequest(pr_data, repo=self, record=self._record)
            alerts += pr.alerts()
        return alerts

    @property
    def full_slug(self):
        return f"{self.project_slug}/{self.slug}"


class Checker:
    def __init__(self, *, url, record, http):
        self._base_url = url
        self._record = record
        self._http = http

    def check(self, project, repo):
        if not within_business_hours(datetime.now()):
            logger.info("skipping check: outside of business hours")
            return []
        repo = Repo(self._base_url, project, repo, record=self._record, http=self._http)
        return repo.alerts()


class JsonRecord:
    """Remembers when each pull request was last alerted on, in a small JSON file."""

    def __init__(self, *, database):
        self._db_path = Path(database)

    def record_alert(self, pr_id):
        db = self._read_db()
        db[pr_id] = datetime.now().isoformat()
        self._db_path.write_text(json.dumps(db))

    def alerted_recently(self, pr_id):
        db = self._read_db()
        if pr_id not in db:
            return False
        return datetime.now() - datetime.fromisoformat(db[pr_id]) < timedelta(hours=3)

    def _read_db(self):
        try:
            return json.loads(self._db_path.read_text())
        except FileNotFoundError:
            return {}


def check(url, repos, *, reporter, config=Config()):
    http = HttpClient(verify_https=config.verify_https)
    record = JsonRecord(database=config.database)
    checker = Checker(url=url, record=record, http=http)
    alerts = []
    exception = None
    for repo in repos:
        logger.info(f"checking repo: {repo}")
        project_slug, repo_slug = repo.split("/")
        try:
            alerts.extend(checker.check(project_slug, repo_slug))
        except FailedToGetData as exc:
            exception = exc
    # Report whatever was collected, then surface the most recent failure, if any.
    reporter.report(alerts)
    if exception:
        raise exception


prcop/cli.py

import logging

import click

from .checker import check
from .config import Config
from .reporters import SlackReporter

logging.basicConfig(level="WARNING", format="%(asctime)s [%(levelname)s] %(name)s %(message)s")


@click.group()
def cli():
    pass


@cli.command()
@click.option("--bitbucket-url", required=True)
@click.option("--slack-webhook", required=True)
@click.option("--slack-channel", required=True)
@click.option("-i", "--input", "input_file", type=click.File("r"))
@click.option("--database")
@click.option("--no-verify-https", is_flag=True)
@click.option("-v", "--verbose", count=True)
@click.argument("repos", nargs=-1, metavar="[REPO...]")
def run(
    bitbucket_url,
    slack_webhook,
    slack_channel,
    input_file,
    database,
    no_verify_https,
    verbose,
    repos,
):
    if verbose:
        logging.getLogger().setLevel("DEBUG" if verbose > 1 else "INFO")
    repos = list(repos)
    if input_file:
        repos.extend(line.strip() for line in input_file)
    config = Config(verify_https=not no_verify_https)
    if database:
        config.database = database
    reporter = SlackReporter(url=slack_webhook, channel=slack_channel)
    check(bitbucket_url, repos, reporter=reporter, config=config)


prcop/config.py

from dataclasses import dataclass


@dataclass
class Config:
    database: str = "prcopdb.json"
    verify_https: bool = True
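The CLI above is a thin wrapper; the public check() function can also be driven directly by anything that exposes a report(alerts) method. A minimal sketch, assuming a reachable Bitbucket Server instance; the base URL, webhook, channel, and repository slugs below are placeholders:

from prcop.checker import check
from prcop.config import Config
from prcop.reporters import SlackReporter

reporter = SlackReporter(
    url="https://hooks.slack.com/services/XXX/YYY/ZZZ",  # placeholder webhook URL
    channel="#code-review",
)
config = Config(database="/tmp/prcopdb.json", verify_https=True)
check(
    "https://bitbucket.example.com",      # placeholder Bitbucket Server base URL
    ["PROJ/backend", "PROJ/frontend"],    # placeholder PROJECT/repo slugs
    reporter=reporter,
    config=config,
)

Note that check() reports whatever it managed to collect before re-raising the last FailedToGetData, so the reporter is still invoked even when some repositories fail.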
prcop/exceptions.py

class PrCopException(Exception):
    pass


class FailedToGetData(PrCopException):
    pass


prcop/http_client.py

import json
from dataclasses import dataclass

import requests
import urllib3


@dataclass
class HttpResponse:
    status_code: int
    text: str

    def json(self):
        return json.loads(self.text)


class HttpClient:
    def __init__(self, *, verify_https=True):
        self._session = requests.Session()
        if not verify_https:
            # Skipping certificate verification also silences urllib3's insecure-request warnings.
            self._session.verify = False
            urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

    def get(self, url):
        response = self._session.get(url)
        return HttpResponse(status_code=response.status_code, text=response.text)

    def post(self, url, payload=None):
        self._session.post(url, json=payload or {})


prcop/reporters.py

from .http_client import HttpClient


class SlackReporter:
    def __init__(self, *, url, channel):
        self._url = url
        self._channel = channel
        self._http = HttpClient()

    def report(self, alerts):
        # Post each alert to the configured Slack incoming-webhook URL.
        for alert in alerts:
            payload = {"channel": self._channel, "text": str(alert)}
            self._http.post(self._url, payload=payload)


prcop-0.3.0.dist-info/LICENSE

The MIT License (MIT)

Copyright (c) 2019 Robbie Clarken

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
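As a reference for stubbing out HttpClient in tests, the sketch below shows the minimal slice of a Bitbucket Server pull-request payload that PullRequest actually reads: id, title, updatedDate (milliseconds since the epoch), and each reviewer's status. The stub classes and sample values are hypothetical and not part of the package:

from datetime import datetime, timedelta

from prcop.checker import PullRequest


class FakeRepo:
    base_url = "https://bitbucket.example.com"  # placeholder
    project_slug = "PROJ"
    slug = "backend"
    full_slug = "PROJ/backend"


class FakeRecord:
    def alerted_recently(self, pr_id):
        return False  # pretend no recent alert exists for this PR

    def record_alert(self, pr_id):
        pass  # discard the record; a real run persists it via JsonRecord


# The subset of the Bitbucket Server response that PullRequest consults.
pr_data = {
    "id": 42,
    "title": "Add retry logic to importer",
    "updatedDate": int((datetime.now() - timedelta(days=7)).timestamp() * 1000),
    "reviewers": [
        {"status": "APPROVED"},
        {"status": "UNAPPROVED"},
    ],
}

pr = PullRequest(pr_data, repo=FakeRepo(), record=FakeRecord())
for alert in pr.alerts():
    print(alert)

Because the record stub reports no recent alert and the sample PR was last updated a week ago, alerts() yields a single ReviewOverdueAlert asking for one more approval.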