PK! qbrioa_port/__init__.py__version__ = '0.1.0' PK!*brioa_port/exceptions.pyclass BRIOAException(Exception): pass class InvalidWebcamImageException(BRIOAException): pass class FileHasInvalidLastModifiedDateException(BRIOAException): pass PK!G Ibrioa_port/log_keeper.pyimport pandas as pd from datetime import datetime from sqlalchemy.engine import Engine from typing import Optional from brioa_port.util.database import DATABASE_DATETIME_FORMAT from brioa_port.schedule_parser import SCHEDULE_DATE_COLUMNS class LogKeeper: """ Interacts with a database of schedule logs, writing and reading to pandas dataframes originating from the ScheduleParser. Attributes: engine: The database engine that pandas will connect to. """ LOGS_TABLE = 'logs' def __init__(self, engine: Engine) -> None: self.engine = engine def has_entries(self) -> bool: """ Checks if the database has been initialized. """ return bool(self.engine.dialect.has_table(self.engine, self.LOGS_TABLE)) def write_entries(self, date_retrieved: datetime, entries: pd.DataFrame) -> int: """ Inserts new log entries into the database. Args: date_retrieved: Will be checked against existing entries, as to not overwrite the logs with older information. entries: The new entries. Returns: The number of new entries which were inserted. """ # Add the date_retrieved timestamp to the entries as an index, # so it can be differentiated from earlier entries in the database. indexed_entries = entries.copy() indexed_entries['date_retrieved'] = date_retrieved indexed_entries = indexed_entries.set_index('date_retrieved') def is_entry_new(entry: pd.Series) -> bool: """ Determines if an entry has new information. For that, it must either: a. Be a new trip (trip name not in the database). b. Have at least one different value from the latest entry for that trip, and Have a more recent date_retrieved than the existing one. Args: entry: The log entry to be compared. Returns: Whether it's new or not. """ existing_entry = self.read_latest_entry_for_trip(entry['Viagem']) if existing_entry is not None: no_changes = entry.equals(existing_entry.drop('date_retrieved')) if no_changes: return False existing_date_retrieved = existing_entry['date_retrieved'].to_pydatetime() if existing_date_retrieved >= date_retrieved: return False return True if self.has_entries(): # Database has existing entries. # Insert only new entries, aka the ones with new information. new_entries = indexed_entries[indexed_entries.apply(is_entry_new, axis=1)] else: # Database is empty. Insert everything. new_entries = indexed_entries if not new_entries.empty: new_entries.to_sql(self.LOGS_TABLE, con=self.engine, if_exists='append') return len(new_entries) def read_entries_for_trip(self, trip_name: str) -> Optional[pd.DataFrame]: """ Queries the log entries for the given trip name, ordered from most to least recent. Args: trip_name: e.g. 'MCBF124' Returns: A dataframe with the log entries, or None if the trip is not found. """ date_dict = {x: DATABASE_DATETIME_FORMAT for x in SCHEDULE_DATE_COLUMNS + ['date_retrieved']} df = pd.read_sql( f'select * from {self.LOGS_TABLE} where Viagem = ? order by date_retrieved desc', con=self.engine, params=(trip_name,), parse_dates=date_dict ) return None if df.empty else df def read_latest_entry_for_trip(self, trip_name: str) -> Optional[pd.Series]: """ Queries the latest log entry for the given trip name. Args: trip_name: e.g. 'MCBF124' Returns: The entry, or None if the trip is not found. """ entries = self.read_entries_for_trip(trip_name) return None if entries is None else entries.iloc[0] def read_ships_at_port(self, arrives_before: datetime, sails_after: datetime) -> pd.DataFrame: """ Queries the ships present at the port in a given date/time range. Includes, ships that have arrived, berthed, or recently sailed (left). Args: arrives_before: Maximum threshold for the arrival date/time. e.g. include ships that arrived since X. sails_after: Minimum threshold for the sailing date/time. e.g. include ships that will be in the port until X. Returns: A dataframe contaiing the latest log entries for the relevant ships. Columns: Berço: berth number (may often be NaN, i.e. not determined) Navio: ship name Viagem: trip name TA: time of arrival TB: time of berthing TS: time of sailing TA_is_predicted: indicates if TA is actual (confirmed time), or an estimation. TB_is_predicted: indicates if TB is actual (confirmed time), or an estimation. TS_is_predicted: indicates if TS is actual (confirmed time), or an estimation. """ date_dict = {x: DATABASE_DATETIME_FORMAT for x in ['TA', 'TB', 'TS']} max_arrival = arrives_before.strftime(DATABASE_DATETIME_FORMAT) min_sailing = sails_after.strftime(DATABASE_DATETIME_FORMAT) return pd.read_sql(( "select\n" " Berço, Navio, Viagem, ifnull(ETA, ATA) as TA, ifnull(ATB, ETB) as TB, ifnull(ATS, ETS) as TS,\n" " ATA is NULL as TA_is_predicted, ATB is NULL as TB_is_predicted, ATS is NULL as TS_is_predicted\n" "from\n" f" {self.LOGS_TABLE}\n" "where\n" " TA <= datetime(?)\n" " and (TS >= datetime(?) or TS = NULL)\n" "group by\n" " Viagem\n" "having\n" " date_retrieved = max(date_retrieved)\n" "order by\n" " TA, TB, TS, logs.Navio, logs.Viagem"), con=self.engine, params=(max_arrival, min_sailing), parse_dates=date_dict ) PK!6--brioa_port/schedule_parser.pyimport pandas as pd SCHEDULE_DATE_COLUMNS = ['Abertura do Gate', 'Deadline', 'ETA', 'ATA', 'ETB', 'ATB', 'ETS', 'ATS'] def parse_dates(orig_df: pd.DataFrame) -> pd.DataFrame: """ Parses the date format that the schedule spreadsheet uses into proper datetime objects. Returns: A new data frame with the parsed dates. """ df = orig_df.copy() date_format = '%d/%m/%Y %H:%M:%S' for date_column in SCHEDULE_DATE_COLUMNS: df[date_column] = pd.to_datetime(df[date_column], format=date_format) return df def normalize_ship_name(ship_name: str) -> str: """ Removes the redundant "trip name" after the ship name. Args: ship_name: The raw string, e.g. 'BOATY MCBOATFACE - MCBF1234' Returns: The normalized name, e.g. 'BOATY MCBOATFACE' """ if ' - ' not in ship_name: return ship_name return ship_name.split(' - ')[0] def parse_schedule_spreadsheet(path: str) -> pd.DataFrame: """ Applies all the parsing and normalization steps to the raw schedule spreadsheet. Args: path: Where to read the spreadsheet from Returns: A dataframe with the parsed data. """ df = pd.read_excel(path) df = parse_dates(df) df['Navio'] = df['Navio'].apply(normalize_ship_name) return df PK!brioa_port/scripts/__init__.pyPK!!##$brioa_port/scripts/brioa_schedule.py#!/usr/bin/env python3 """BRIOA Schedule Downloader Usage: brioa_programacao.py update online [--period ] brioa_programacao.py update from_file [--retrieved-at ] brioa_programacao.py current brioa_programacao.py trip Options: --period To constantly update the database, set the update frequency with this option. --retrieved-at The date/time that the information in the file is from. ISO 8601 Format: 2000-01-01 00:00:00 By default, it's taken from the filename (unix timestamp, local time). """ import functools import time import pandas as pd import schedule import logging import sys from docopt import docopt from pathlib import Path from datetime import datetime from dateutil.relativedelta import relativedelta from typing import Dict, Optional from brioa_port.util.datetime import make_delta_human_readable from brioa_port.util.database import create_database_engine from brioa_port.util.entry import get_ship_status, get_ship_berth_number, ShipStatus from brioa_port.util.args import parse_period_arg from brioa_port.schedule_parser import parse_schedule_spreadsheet from brioa_port.log_keeper import LogKeeper logging.basicConfig(level=logging.WARNING) logger = logging.getLogger(__name__) def determine_entry_status(entry: pd.Series) -> str: """ Takes the raw data from a ship entry and presents a human readable status. Uses the arrival, berthing, and sailing dates to determine the status of the ship. """ # Use the current date to compare the with the others # Store it so that it's consistent over the runtime of the function now = datetime.now() status = get_ship_status(entry, now) berco = get_ship_berth_number(entry) if berco is None: berco_desc = 'T.B.D.' else: berco_desc = '#' + str(berco) if status == ShipStatus.TO_ARRIVE: is_prediction = entry['TA_is_predicted'] status_desc = f'Arrives {make_delta_human_readable(now, entry["TA"])}' elif status == ShipStatus.ARRIVED: is_prediction = entry['TA_is_predicted'] status_desc = f'Arrived, berths at {berco_desc} {make_delta_human_readable(now, entry["TB"])}' elif status == ShipStatus.BERTHED: is_prediction = entry['TB_is_predicted'] status_desc = f'Berthed at {berco_desc}, sails {make_delta_human_readable(now, entry["TS"])}' elif status == ShipStatus.SAILED: is_prediction = entry['TS_is_predicted'] status_desc = f'Sailed {make_delta_human_readable(now, entry["TS"])}' else: is_prediction = False status_desc = 'Unknown' if is_prediction: return '[PREDICTION] ' + status_desc return status_desc def update_online_once(database_path: str) -> None: spreadsheet_url = 'http://www.portoitapoa.com.br/excel/' logkeeper = LogKeeper(create_database_engine(database_path)) new_data = parse_schedule_spreadsheet(spreadsheet_url) date_retrieved = datetime.now() n_new_entries = logkeeper.write_entries(date_retrieved, new_data) n_new_entries_str = '1 new entry' if n_new_entries == 1 else f'{n_new_entries} new entries' logging.info(f'{date_retrieved.strftime("%Y-%m-%d %H:%M:%S")}: {n_new_entries_str}') def cmd_update_online(args: Dict[str, str]) -> None: """ Updates a given database by downloading the current schedule spreadsheet from the website. Will run only once, or in a loop, depending on if a period is specified. """ # No period specified. Do it once. if args['--period'] is None: update_online_once(args['']) return # Handle period option try: period = parse_period_arg(args['--period']) except ValueError as e: logger.critical("Error: %s", e) sys.exit(1) schedule.every(period).seconds.do( functools.partial(update_online_once, args['']) ) while True: schedule.run_pending() time.sleep(1) def cmd_update_from_file(args: Dict[str, str]) -> None: """ Updates a given database by reading from a given spreadsheet file. The date of retrieval for the information in the spreadsheet can be inferred from the filename, or specified from an option. """ logkeeper = LogKeeper(create_database_engine(args[''])) spreadsheet_path = Path(args['']) new_data = parse_schedule_spreadsheet(str(spreadsheet_path)) # Try to parse a date from the filename try: date_from_filename: Optional[datetime] = datetime.fromtimestamp(int(spreadsheet_path.stem)) except (ValueError, OverflowError, OSError): date_from_filename = None # Try to parse a date from the CLI option if args['--retrieved-at'] is not None: date_from_args = datetime.strptime(args['--retrieved-at'], '%Y-%m-%d %H:%M:%S') else: date_from_args = None # If no valid date is found, the update cannot happen if date_from_args is None and date_from_filename is None: logging.critical(( 'Unable to infer file retrieval date from filename.\n' 'Use a Unix timestamp as the filename, or specify the --retrieved-at option.' )) sys.exit(1) # Give preference to the date from the CLI option, if it's set date_retrieved = date_from_args if date_from_args is not None else date_from_filename n_new_entries = logkeeper.write_entries(date_retrieved, new_data) logging.info('1 new entry' if n_new_entries == 1 else f'{n_new_entries} new entries') def cmd_current(args: Dict[str, str]) -> None: """ Lists the ships that are currently at the port. Includes the arrived, berthed, and recenly sailed ships for the current day. """ logkeeper = LogKeeper(create_database_engine(args[''])) if not logkeeper.has_entries(): print("No entries found.") return now = datetime.now() in_a_day = now + relativedelta(days=1) start_of_today = now.replace(hour=0, minute=0, second=0, microsecond=0) entries = logkeeper.read_ships_at_port( # Include ships that have arrived or will arrive in 1 day arrives_before=in_a_day, # Include ships that won't sail today, i.e. are still at the port sails_after=start_of_today ) for index, entry in entries.iterrows(): print(f'{entry["Navio"]} ({entry["Viagem"]}): {determine_entry_status(entry)}') def cmd_trip(args: Dict[str, str]) -> None: """ Lists all the recorded log entries for the given trip name. """ logkeeper = LogKeeper(create_database_engine(args[''])) if not logkeeper.has_entries(): logger.error("No entries found.") return entry = logkeeper.read_latest_entry_for_trip(args['']) if entry is None: logger.error("Trip not found.") return def desc_event(action: str, date_expected: pd.Timestamp, date_actual: pd.Timestamp) -> str: """ Builds a message representing the relative lateness/earliness of an event (arrival, berthing, sailing) by comparing the expected (predicted) and actual occurrence dates. """ date_expected = date_expected.to_pydatetime() date_actual = date_actual.to_pydatetime() if pd.isnull(date_actual): return 'Yet to ' + action.lower() delta = make_delta_human_readable(date_actual, date_expected, absolute=True) if date_actual > date_expected: return action + ' late ' + delta if date_actual < date_expected: return action + ' early ' + delta return action + ' on time' berth = get_ship_berth_number(entry) berth_str = '?' if berth is None else str(berth) print(f'{entry["Navio"]} (owned by {entry["Armador"]}, length {entry["Comprimento(m)"]}m)') if not pd.isnull(entry['ATA']): print(desc_event('Arrived', entry['ETA'], entry['ATA']), 'at', entry['ATA']) else: print(desc_event('Arrive', entry['ETA'], entry['ATA']), 'at', entry['ETA']) if not pd.isnull(entry['ATB']): print(desc_event('Berthed', entry['ETB'], entry['ATB']), f'at {entry["ATB"]} (#{berth_str})') else: print(desc_event('Berth', entry['ETB'], entry['ATB']), f'at {entry["ETB"]} (#{berth_str})') if not pd.isnull(entry['ATS']): print(desc_event('Sailed', entry['ETS'], entry['ATS']), 'at', entry['ATS']) else: print(desc_event('Sail', entry['ETS'], entry['ATS']), 'at', entry['ETS']) def main() -> None: args = docopt(__doc__) if args['update'] and args['online']: cmd_update_online(args) if args['update'] and args['from_file']: cmd_update_from_file(args) elif args['current']: cmd_current(args) elif args['trip']: cmd_trip(args) if __name__ == '__main__': main() PK!"Ah -brioa_port/scripts/brioa_timelapse_creator.py"""BRIOA Timelapse Creator. Reads paths to the images that will make up the timelapse from the standard input. The images should be named with the unix timestamp at the time they were taken. Usage: brioa_timelapse_creator --database [--image-list-from-file ] [--output-fps ] [--output-resolution ] [--no-progress] Options: --database Information about the ships in port will be obtained here. --image-list-from-file Read the image list from a file instead of the standard input. --output-fps Framerate of the output [default: 30]. --output-resolution Resolution of the output. The valid values are 1080p or 720p [default: 1080p]. --no-progress Don't show a progress bar. """ import os.path import sys import logging from datetime import datetime from docopt import docopt from PIL import Image from subprocess import Popen, PIPE from pathlib import Path from typing import List, Generator, Iterable, Tuple, Dict, NamedTuple from tqdm import tqdm from brioa_port.timelapse_frame_processor import TimelapseFrameProcessor from brioa_port.util.database import create_database_engine from brioa_port.log_keeper import LogKeeper logging.basicConfig(level=logging.WARNING) logger = logging.getLogger(__name__) def start_ffmpeg_process(output_path: str, image_format: str, fps: int) -> Popen: """ Start and FFmpeg process that reads images from stdin in the given format and joins them into a video at the given output path. """ return Popen( [ 'ffmpeg', # Overwrite without confirmation '-y', # Don't output warnings and other information '-loglevel', 'error', # Take images from a pipe (duh) '-f', 'image2pipe', # The images will be in this format '-vcodec', image_format, '-framerate', str(fps), # Read from STDIN '-i', '-', # Output quality (h264 codec) '-crf', '22', output_path ], stdin=PIPE ) def make_frames_into_video(frames: Iterable[Image.Image], output_path: Path, fps: int) -> None: """ Takes some images and joins them into a video file using FFmpeg. Args: frames: The images to join. output_path: Where to put the video. fps: The framerate of the video. """ # Use PPM to pass the images to FFmpeg. # It's faster than, say, JPEG because it has no compression. image_format = 'ppm' ffmpeg_process = start_ffmpeg_process(str(output_path), image_format, fps) for frame in frames: frame.save(ffmpeg_process.stdin, image_format) ffmpeg_process.stdin.close() ffmpeg_process.wait() def process_images( image_paths: Iterable[str], frame_processor: TimelapseFrameProcessor, show_progress: bool ) -> Generator[Image.Image, None, None]: """ Takes some image paths and runs them through a frame processor, returns the results as each frame is completed. The date that is required by the processor is taken from each image's filename. Frames that fail to complete are ignored. """ for image_path in tqdm(image_paths, desc='Processing the images', unit='images', disable=not show_progress): try: with Image.open(image_path) as image: date = datetime.fromtimestamp(int(os.path.basename(image_path))) frame = frame_processor.make_frame(image, date) except OSError as e: logger.warning(f"Ignoring image at '{image_path}'. The error was: {e}") if frame is not None: yield frame def read_lines_from_stdin() -> List[str]: """ Reads from standard input and returns each line, with no EOL characters. """ return sys.stdin.read().splitlines() def read_lines_from_file(path: str) -> List[str]: """ Reads from a given path and returns each line, with no EOL characters. """ with open(path, 'rt') as f: lines: List[str] = f.read().splitlines() return lines class FrameProcessorArgs(NamedTuple): dimensions: Tuple[int, int] scaler: float font_sizes: Dict[str, int] def main() -> None: args = docopt(__doc__) log_keeper = LogKeeper(create_database_engine(args['--database'])) frame_processor_args = { '1080p': FrameProcessorArgs( dimensions=(1920, 1080), scaler=1, font_sizes={ 'huge': 64, 'large': 30, 'medium': 22, 'small': 18, } ), '720p': FrameProcessorArgs( dimensions=(1280, 720), scaler=0.7, font_sizes={ 'huge': 44, 'large': 25, 'medium': 18, 'small': 13, } ) }.get(args['--output-resolution'], None) if frame_processor_args is None: logging.critical('Invalid resolution.') sys.exit(1) frame_processor = TimelapseFrameProcessor( log_keeper, dimensions=frame_processor_args.dimensions, scaler_value=frame_processor_args.scaler, font_sizes=frame_processor_args.font_sizes ) if args['--image-list-from-file'] is None: image_paths = read_lines_from_stdin() else: image_paths = read_lines_from_file(args['--image-list-from-file']) frames = process_images(image_paths, frame_processor, show_progress=not args['--no-progress']) make_frames_into_video(frames, Path(args['']), int(args['--output-fps'])) if __name__ == '__main__': main() PK!t-brioa_port/scripts/brioa_webcam_downloader.py"""BRIOA Webcam Downloader Periodically downloads images from the Port of Itapoa webcam to a specified output directory. The filenames are the UNIX timestamp representing the time that the photo was taken. The output directory will be created if it doesn't exist. Usage: brioa_webcam_downloader.py [--period ] [--verbose | --quiet] Options: -v, --verbose Show more information messages --period How often to download an image [default: 20]. """ import schedule import time import os import errno import sys import logging from docopt import docopt from pathlib import Path from brioa_port.webcam_downloader import download_webcam_image from brioa_port.exceptions import InvalidWebcamImageException from brioa_port.util.args import parse_period_arg logging.basicConfig(level=logging.WARNING) logger = logging.getLogger(__name__) def safe_download(webcam_url: str, output_dir: Path) -> None: """ Task for the scheduler. Downloads an image and ignores exceptions. """ try: image_path = download_webcam_image(webcam_url, output_dir) logger.info("Downloaded " + image_path.stem) except InvalidWebcamImageException: logger.warning("Got invalid image. Continuing.") def main() -> None: arguments = docopt(__doc__) webcam_url = 'http://www.portoitapoa.com.br/images/camera/camera.jpeg' output_dir_path = Path(arguments['']) # Handle logging options if arguments['--verbose']: logger.setLevel(logging.DEBUG) if arguments['--quiet']: logging.disable(logging.CRITICAL) # Handle period option try: period = parse_period_arg(arguments['--period']) except ValueError as e: logger.critical("Error: %s", e) sys.exit(1) # Handle output dir argument, creates it if necessary try: os.makedirs(output_dir_path) except OSError as e: if e.errno != errno.EEXIST: logger.critical("Error: Unable to create output directory.") sys.exit(1) # Download in a loop! schedule.every(period).seconds.do(lambda: safe_download(webcam_url, output_dir_path)) while True: schedule.run_pending() time.sleep(1) if __name__ == '__main__': main() PK!ȑC'C''brioa_port/timelapse_frame_processor.pyimport pandas as pd from dateutil.relativedelta import relativedelta from datetime import datetime from PIL import Image, ImageDraw, ImageFont from babel.dates import format_date, format_time from typing import Tuple, Dict, Optional from brioa_port.log_keeper import LogKeeper from brioa_port.util.entry import get_ship_status, get_ship_berth_number, \ ShipStatus class TimelapseFrameProcessor: """ Turns raw webcam images into timelapse frames, with overlayed information. Attributes: log_keeper: Source for the ship schedule information. dimensions: Output frame resolution. Preferably with a 16:9 aspect ratio. scaler_value: Adjust this to change the spaces between elements, at different resolutions. font_sizes: Specify the font sizes that work best for the resolution. The following keys are required: 'huge', 'large', 'medium', and 'small'. font_path_or_name: From where to load the font. Will search system directories. """ def __init__( self, log_keeper: LogKeeper, dimensions: Tuple[int, int] = (1920, 1080), scaler_value: float = 1, font_sizes: Optional[Dict[str, int]] = None, font_path_or_name: str = 'DejaVuSans' ) -> None: self.log_keeper = log_keeper self.dimensions = dimensions self.scaler_value = scaler_value default_font_sizes = { 'huge': 64, 'large': 30, 'medium': 22, 'small': 18, } if font_sizes is None: font_sizes = default_font_sizes elif len(set(font_sizes.keys()).difference(default_font_sizes.keys())) != 0: raise ValueError('All font sizes must be specified.') self.fonts = { 'huge': self._load_font(font_path_or_name, font_sizes['huge']), 'large': self._load_font(font_path_or_name, font_sizes['large']), 'medium': self._load_font(font_path_or_name, font_sizes['medium']), 'small': self._load_font(font_path_or_name, font_sizes['small']) } self.font_path = self.fonts['medium'].path self.colors = { 'background': '#05021a', 'date_box_background': '#cc2f26', 'date_box_text': 'white', 'berthed_ship_box_background_even': '#00a32e', 'berthed_ship_box_background_odd': '#00d63d', 'berthed_ship_box_text': 'white', } def _load_font(self, font_path_or_name: str, size: int) -> ImageFont.FreeTypeFont: """ Loads a font file from the system. """ try: return ImageFont.truetype(font_path_or_name, size) except IOError: raise ValueError(f"Could not load font at '{self.font_path}'") def _make_canvas(self) -> Image.Image: """ Makes a blank image to draw the frame onto. """ return Image.new('RGB', self.dimensions, self.colors['background']) def _get_berthed_ships(self, date: datetime) -> pd.DataFrame: """ Queries the LogKeeper instance to obtain a list of the ships berthed to the port at the given date. """ start_of_today = date.replace(hour=0, minute=0, second=0, microsecond=0) ships_at_port = self.log_keeper.read_ships_at_port( start_of_today + relativedelta(days=1), start_of_today ) return ships_at_port[ ships_at_port.apply( lambda ship: get_ship_status(ship, date) == ShipStatus.BERTHED, axis=1 ) ].reset_index(drop=True) def _draw_date_box(self, draw: ImageDraw.Draw, top_left_corner: Tuple[int, int], width: int, date: datetime) -> int: """ Draws a date/time clock onto the canvas at the given position. Args: draw: The ImageDraw instance to draw on the desired canvas. top_left_corner: Where to start the box. width: The fixed horizontal dimension of the box. date: The date/time to display on the clock. Returns: The bottom y coordinate of the box, so that further elements may be drawn after it """ time_str = format_time(date, 'HH:mm', locale='pt_BR') time_font = self.fonts['huge'] time_font_height = time_font.getsize('X')[1] date_str = format_date(date, "EEEE\ndd 'de' MMM 'de' yyyy", locale='pt_BR').capitalize() date_font = self.fonts['large'] date_font_height = date_font.getsize('X')[1] margin = 20 * self.scaler_value bottom_y = int(top_left_corner[1] + time_font_height + date_font_height * 2 + margin * 4) draw.rectangle( ( top_left_corner[0], top_left_corner[1], top_left_corner[0] + width, bottom_y ), fill=self.colors['date_box_background'] ) time_y = top_left_corner[1] + margin draw.text( (top_left_corner[0] + margin, time_y), time_str, fill=self.colors['date_box_text'], font=time_font, ) draw.multiline_text( (top_left_corner[0] + margin, time_y + time_font_height + margin), date_str, fill=self.colors['date_box_text'], font=date_font ) return bottom_y def _draw_berthed_ship_box( self, draw: ImageDraw.Draw, top_left_corner: Tuple[int, int], width: int, ship: pd.Series ) -> int: """ Draws a berthed ship's information on a box. Args: draw: The ImageDraw instance to draw on the desired canvas. top_left_corner: Where to start the box. width: The fixed horizontal dimension of the box. ship: Source of the information for the ship, obtained from the LogKeeper. Returns: The bottom y coordinate of the box, so that further elements may be drawn after it """ berco = get_ship_berth_number(ship) # Alternate colors for different designated berthing numbers. if berco is None or berco % 2 == 0: background_color = self.colors['berthed_ship_box_background_even'] else: background_color = self.colors['berthed_ship_box_background_odd'] name_str = ship['Navio'] name_font = self.fonts['large'] # Resize the font until the name fits within the specified width. while (name_font.size > 12) and (name_font.getsize(name_str)[0] > (width - 30)): name_font = self._load_font(self.font_path, max(1, name_font.size - 1)) name_font_height = name_font.getsize('X')[1] berco_str = 'Berço ' + str(berco) berco_font = self.fonts['medium'] berco_font_height = berco_font.getsize('X')[1] margin = (20 * self.scaler_value) bottom_y = int(top_left_corner[1] + name_font_height + berco_font_height + margin * 3) draw.rectangle( ( top_left_corner[0], top_left_corner[1], top_left_corner[0] + width, bottom_y ), fill=background_color ) draw.multiline_text( (top_left_corner[0] + margin, top_left_corner[1] + margin), name_str, fill=self.colors['berthed_ship_box_text'], font=name_font ) if berco is not None: draw.multiline_text( (top_left_corner[0] + margin, top_left_corner[1] + name_font_height + margin * 2), berco_str, fill=self.colors['berthed_ship_box_text'], font=berco_font ) return bottom_y def make_frame(self, image: Image.Image, date: datetime) -> Image.Image: """ Processes a webcam image into a timelapse frame. Args: image: The raw image. Will be resized and pasted onto the final frame. date: The time that the image was taken. Used to correlating other information. Returns: The processed frame, in the form of a new image. """ canvas = self._make_canvas() try: # Correct for SDTV 480i pixel aspect ratio # https://en.wikipedia.org/wiki/Standard-definition_television#Pixel_aspect_ratio if image.width == 704 and image.height == 480: image_aspect_ratio = 640 / 480 else: image_aspect_ratio = image.width / image.height # Try to fit the image to a fraction of the canvas width # (to leave some space for the sidebar) new_image_width = int(canvas.width * 0.83) new_image_height = int(new_image_width / image_aspect_ratio) # If that would make the image go off canvas vertically, # fit it to the canvas height instead if new_image_height > canvas.height: new_image_height = canvas.height new_image_width = int(new_image_height * image_aspect_ratio) image = image.resize( (new_image_width, new_image_height), Image.BICUBIC ) image_x = canvas.width - image.width canvas.paste(image, (image_x, 0)) draw = ImageDraw.Draw(canvas) except OSError: return None date_bottom_y = self._draw_date_box(draw, (0, 0), image_x, date) ships_berthed = self._get_berthed_ships(date) ship_box_height = 0 for index, ship in ships_berthed.iterrows(): ship_box_top_y = date_bottom_y + (index * ship_box_height) ship_box_bottom_y = self._draw_berthed_ship_box(draw, (0, ship_box_top_y), image_x, ship) ship_box_height = ship_box_bottom_y - ship_box_top_y return canvas PK!brioa_port/util/__init__.pyPK!dE$EEbrioa_port/util/args.py def parse_period_arg(arg: str) -> int: """ Makes sure that a period argument is a valid postive integer. """ try: period = int(arg) except ValueError: raise ValueError("Period must be an integer") if period < 0: raise ValueError("Period cannot be negative") return period PK!x+))brioa_port/util/database.pyimport sqlalchemy DATABASE_DATETIME_FORMAT = '%Y-%m-%d %H:%M:%S' def create_database_engine(path: str) -> sqlalchemy.engine.Engine: """ Creates an SQLAlchemy database engine for the SQLite database at the given path. """ return sqlalchemy.create_engine('sqlite:///' + path) PK! Jbrioa_port/util/datetime.pyimport time from datetime import datetime from dateutil.relativedelta import relativedelta def get_unix_timestamp_from_local_datetime(date: datetime) -> int: """ Turns a datetime object into an unix epoch timestamp integer. """ return int(time.mktime(date.timetuple())) def make_delta_human_readable(start_date: datetime, end_date: datetime, absolute: bool = False) -> str: """ Humanizes a timespan (difference between two datetime objects). Will show differences in years, month, days, hours, and minutes, as needed. The 'absolute' argument makes the output indifferent to the order of the start and end dates. Examples: 2010-01-01, 2010-01-01 -> 'now' 2010-01-01, 2010-01-01, absolute mode -> 'no difference' 2010-01-01, 2010-01-02 -> 'in 1 day' 2010-01-02, 2010-01-01 -> '1 day ago' 2010-01-01, 2010-01-02, absolute mode -> 'in 1 day' 2010-01-02, 2010-01-01, absolute mode -> 'in 1 day' 2010-01-01 00:00:00, 2010-01-02 01:42:03 -> in 1 day, 1 hour, 42 minutes """ if start_date == end_date: return 'no difference' if absolute else 'now' dates_descending = sorted((start_date, end_date), reverse=True) delta = relativedelta(dates_descending[0], dates_descending[1]) attrs = ['years', 'months', 'days', 'hours', 'minutes'] attr_string = ', '.join( '%d %s' % (getattr(delta, attr), getattr(delta, attr) > 1 and attr or attr[:-1]) for attr in attrs if getattr(delta, attr) ) if absolute: return 'by ' + attr_string if start_date < end_date: return 'in ' + attr_string return attr_string + ' ago' PK!P&xxbrioa_port/util/entry.pyimport pandas as pd from datetime import datetime from enum import Enum from typing import Optional class ShipStatus(Enum): UNKNOWN = -1 TO_ARRIVE = 0 ARRIVED = 1 BERTHED = 2 SAILED = 3 def get_ship_status(ship_entry: pd.Series, date: datetime) -> ShipStatus: """ Determines the status of a ship from the dates in it's log entry, and a given date to compare, e.g. 'now'. Args: ship_entry: The ship entry, with the arrival, berthing, and sailing dates. date: What to compare the log dates with. Returns: The ShipStatus. """ yet_to_arrive = ship_entry['TA'] >= date yet_to_berth = ship_entry['TB'] >= date yet_to_sail = ship_entry['TS'] >= date has_sailed = ship_entry['TS'] < date if yet_to_arrive: return ShipStatus.TO_ARRIVE if yet_to_berth: return ShipStatus.ARRIVED if yet_to_sail: return ShipStatus.BERTHED if has_sailed: return ShipStatus.SAILED return ShipStatus.UNKNOWN def get_ship_berth_number(ship_entry: pd.Series) -> Optional[int]: """ Converts the raw ship entry info into an nice number. Args: ship_entry: The log entry with the berth info. Returns: The berth number, or None if there isn't one. """ if not pd.isnull(ship_entry['Berço']): return int(ship_entry['Berço']) return None PK! ; brioa_port/util/request.pyimport email.utils import shutil import urllib.request from datetime import datetime from email.header import Header from http.client import HTTPResponse from pathlib import Path from typing import Optional, Union, NamedTuple, cast from dateutil import tz from brioa_port.exceptions import FileHasInvalidLastModifiedDateException from brioa_port.util.datetime import get_unix_timestamp_from_local_datetime def save_latest_file_from_url(url: str, output_dir: Path) -> Path: """ Downloads a file and names it according to the reported last modified date. Throws an error if no date is returned by the server. Args: url: Where to download from. output_dir: Where to download to. Returns: Where the file was saved (including the filename with the date). """ response = open_latest_file_from_url(url) output_path = output_dir / str(get_unix_timestamp_from_local_datetime(response.last_modified_date)) if output_path.exists(): raise FileExistsError() with output_path.open('wb') as out_file: shutil.copyfileobj(response.file, out_file) response.file.close() return output_path class ResponseWithLastModifiedDate(NamedTuple): file: HTTPResponse last_modified_date: datetime def open_latest_file_from_url(url: str) -> ResponseWithLastModifiedDate: """ Opens a connection to a remote file and checks the last modified date that the server returns. Throws an error if no date is returned by the server, or if it's invalid. """ response = cast(HTTPResponse, urllib.request.urlopen(url)) last_modified_header = response.info().get('Last-Modified', None) if last_modified_header is None: raise FileHasInvalidLastModifiedDateException() last_modified_date = parse_last_modified_date(last_modified_header) if last_modified_date is None: raise FileHasInvalidLastModifiedDateException() return ResponseWithLastModifiedDate( response, last_modified_date ) def parse_last_modified_date(last_modified_header: Union[Header, str]) -> Optional[datetime]: """ Parses an HTTP Last-Modified-Date header (RFC 7231). Args: last_modified_header: Accepts the date string, or a Header object. Returns: The parsed datetime object, or None, if no date could be parsed. """ if isinstance(last_modified_header, Header): last_modified_str = str(last_modified_header) else: last_modified_str = last_modified_header if last_modified_str.strip() == "": return None try: return email.utils.parsedate_to_datetime(last_modified_str).astimezone(tz.tzlocal()) except (TypeError, ValueError, IndexError): return None PK!brioa_port/webcam_downloader.pyfrom pathlib import Path from urllib.error import URLError, HTTPError, ContentTooShortError from brioa_port.exceptions import InvalidWebcamImageException, FileHasInvalidLastModifiedDateException from brioa_port.util.request import save_latest_file_from_url def download_webcam_image(webcam_url: str, output_dir_path: Path) -> Path: """ Downloads an image from the given URL to the given directory, with the filename representing the 'last modified date' that the server responds with. Args: webcam_url: Where to download from output_dir_path: Where to download to Returns: The path to the saved file. """ try: return save_latest_file_from_url(webcam_url, output_dir_path) except (URLError, HTTPError, ContentTooShortError, FileHasInvalidLastModifiedDateException, FileExistsError): raise InvalidWebcamImageException() PK!Hd6^+brioa_port-0.1.0.dist-info/entry_points.txt}A >$BV]hA]Y _B803T8)N(ײ++p v,UE%QU 4N*w>OC -?IuPK!HڽTU brioa_port-0.1.0.dist-info/WHEEL A н#Z;/"d&F[xzw@Zpy3Fv]\fi4WZ^EgM_-]#0(q7PK!Hf}#brioa_port-0.1.0.dist-info/METADATAT]o6}ׯЗ$i`dfN$C0WҵED!c;'ۓ$so)'O?:edJ'"P6'ۿC/u5DS:17Vʳ-{U-D`K__pL HKO*eܯiS k7 M*3ƪ Ys2зӏB7}֤vQ؟ j N>C,x{v,{'xVsjW,,i\PhTLϕR.λ#y;+Uf wrMfj Cy$Gs!PۋhrJa)*jk*g'HMZL)cΜ(Ö U{|V͒mL9e Y&'F(Xh CQe}Vo>ZI#mUT1أaqJ sϩp^@k7RP5X'1XEn%`Q_+[Ew~ ,T(h53nVogdfU܊ueqQl=~p6*i Iqu1fTriE[YcʊLebQLL=vF@7:(~"l@5N0NEݱ5/A^ȀPѺf`PK!HB_i;!brioa_port-0.1.0.dist-info/RECORD˒H}? r],D\\LP|鎊Z"IXq4EUW(zf1uZn/^UFo?#foTM;muov3Ez/2WNUJ?%*jz}@ 3mM YzܘGMWFvO9,?HBǰak:/|W|g,AK_|Rӆ PK! qbrioa_port/__init__.pyPK!*Jbrioa_port/exceptions.pyPK!G I2brioa_port/log_keeper.pyPK!6--brioa_port/schedule_parser.pyPK!brioa_port/scripts/__init__.pyPK!!##$( brioa_port/scripts/brioa_schedule.pyPK!"Ah -Cbrioa_port/scripts/brioa_timelapse_creator.pyPK!t-E[brioa_port/scripts/brioa_webcam_downloader.pyPK!ȑC'C''dbrioa_port/timelapse_frame_processor.pyPK! brioa_port/util/__init__.pyPK!dE$EEBbrioa_port/util/args.pyPK!x+))brioa_port/util/database.pyPK! Jbrioa_port/util/datetime.pyPK!P&xxbrioa_port/util/entry.pyPK! ; brioa_port/util/request.pyPK!brioa_port/webcam_downloader.pyPK!Hd6^+^brioa_port-0.1.0.dist-info/entry_points.txtPK!HڽTU brioa_port-0.1.0.dist-info/WHEELPK!Hf}#brioa_port-0.1.0.dist-info/METADATAPK!HB_i;!~brioa_port-0.1.0.dist-info/RECORDPK]