#!/usr/bin/env python3

import json
import logging
import math
import os
import re
import tarfile
import textwrap

from collections import defaultdict
from fuzzywuzzy import fuzz, process
import requests

from base.file.utils.file_utils import mkdir_p_public, make_file_public_recursive
from base.jira import templates
from bravado.exception import HTTPNotFound, HTTPConflict
from data.automated_triage import sun_utils
from data.automated_triage.postprocessing.params import HASHTAGS
from data.automated_triage.triagers import lib
from data.chum import chumpy
from data.ingest.cluster.geofences.lib import get_entered_geofences
from data.ingest.cluster.utils import DataCatalogQueryError, log_chum_error
from infra.data_catalog.client import data_rest_api
from infra.tools.logfile_util import fetch_run_log_by_name

PARAMS_EXTRACT_SUBDIR = 'params'
ISSUES_SUBDIR = 'issues'
NOTE_TYPE = 'note'
DISENGAGEMENT_TYPE = 'disengagement'
VANILLA_TRIAGER = 'Vanilla'
ONLY_VANILLA = 'VanillaUponCreation'
VANILLA_COLOR = '#d3f8d3'
GRAY_COLOR = '#adadad'
NOT_DEDUPED = 'not_deduped'

TRIAGE_BUCKET_MAP = {
    '^aod': 'AO Discretionary',
    '^artbus': 'Articulated Agent',
    '^bad_do': 'Bad Pickup Dropoff Point',
    '^bike': 'Bicyclist Handling',
    '^bikelane': 'Merge into Bike Lane',
    '^cutin': 'Cut-in',
    '^cz': 'Construction Zone',
    '^debris': 'Debris',
    '^ev': 'Emergency Vehicles',
    '^exhaust': 'Lidar Distractors',
    '^gor': 'Get on Road',
    '^junction': 'Non traffic light junction',
    '^lc': 'Lane change / merge',
    '^loc': 'Pose System',
    '^m_block': 'Pass blockage same direction',
    '^narrow': 'Narrow Handling',
    '^nogo': 'NoGo (not planner not localizer)',
    '^o_block': 'Pass blockage into oncoming',
    '^oncoming': 'Oncoming vehicle in our lane',
    '^opendoor': 'OpenDoor',
    '^parking': 'Parking Action',
    '^pcp': 'Perception Failure',
    '^pedcw': 'Pedestrian crosswalk handling',
    '^pedroad': 'Pedestrian on road',
    '^planner': 'Planner Bug',
    '^plots': 'Parking lot',
    '^pvd': 'Parked Vehicle Detection',
    '^reversing': 'Agent Reversing',
    '^tld': 'Traffic light junction',
    '^tlh': 'Traffic light junction',
    '^unpredictable': 'Freeform Vehicles',
    '^zrn': 'ZRN Bug',
}

# Map of event title keyword to slack channel
SLACK_KEYWORDS = {
    '@@carta': 'zigalert-ops',
}
# OAuth token for the Slack IngestBot (https://api.slack.com/apps/A01EXP438UQ).
# Read from the environment rather than hardcoding the secret in source.
SLACK_OAUTH_TOKEN = os.environ.get('SLACK_OAUTH_TOKEN', '')

USER_MENTION_PATTERN = re.compile(r"@@([\w-]+)")

logger = logging.getLogger(__name__)

TRAFFIC_LIGHT_STATES_ZRN_TOPIC = "/mapping/map_select/zrn/value/release"

FAILED_LINE_REGEXP = re.compile(r'#0 .* at (.*:[0-9]+)')

class VehicleTicketsError(RuntimeError):
    pass


class RunFileIntegrityError(RuntimeError):
    pass

def get_event_priority(event):
    '''
    :param: event: event object
    :return: Priority for event (string)
    '''
    if event.event.type != 'DISENGAGEMENT':
        return None

    if event.reason == 'CAUTION':
        return 'High'

    if event.reason == 'CRITICAL':
        return 'Highest'

    return None

def is_point_to_point_run(run):
    run_title = run.get('short_name', '')
    return 'p2pdc' in run_title.lower()

def get_release_version(meta_id):
    full_release_version = None
    response = data_rest_api.get_run_metadata(meta_id, ['release_version'])
    if response['success'] and not response['missing_keys']:
        full_release_version = response['metadata']['release_version'].strip()
    return full_release_version

def get_argus_link(event):
    '''
    :param: event: event object
    :return: Argus link for the event
    '''
    argus_video_offset = 10
    start_time = (event.event.timestamp / lib.NANOSECS_IN_SEC) - argus_video_offset
    return 'https://argus.zooxlabs.com/{meta_id}?time={time}'.\
        format(meta_id=event.event.run_id, time=start_time)


def get_event_geofences(event):
    '''
    :param: event: event
    :return: List of geofence names the event occurred within.
    '''
    if event.event.latLonCoordinate is None:
        return []
    return get_entered_geofences([event.event.latLonCoordinate])

def get_fuzzed_labels(tokens, reference_tokens, threshold_score=80):
    '''
    :param: tokens: Set of tokens that could be misspelled
    :param: reference_tokens: Set of tokens that contain correctly
            spelled words.
    :param: threshold_score: Score required to be considered a match

    :return: Set of tuples containing (original_token, correct_token)
            where the original_token comes from the tokens set and
            correct_token comes from the reference_tokens set.
    '''
    missing_tokens = tokens - reference_tokens
    fuzzed_labels = set()
    for token in missing_tokens:
        matches = process.extract(token, reference_tokens, limit=3, scorer=fuzz.ratio)
        for ref_token, score in matches:
            if score > threshold_score:
                # Check the score in the reverse direction to
                # prevent short hashtags from getting
                # a ton of matches
                reverse_score = fuzz.ratio(ref_token, token)
                if reverse_score > threshold_score:
                    fuzzed_labels.add((token, ref_token))

    return fuzzed_labels
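
# Illustrative example (hypothetical tokens, not taken from a real run): a
# slightly misspelled hashtag such as '#debri' should pair with '#debris'
# when '#debris' is in reference_tokens, since fuzz.ratio scores the pair
# well above the default threshold of 80 in both directions:
#
#   get_fuzzed_labels({'#debri'}, {'#debris', '#bike'})
#   # expected to return {('#debri', '#debris')}; exact scores depend on
#   # the installed fuzzywuzzy version.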

def extract_custom_labels(message):
    '''
    Given a message typed out by a swoperator, look for all tokens
    that begin with a hashtag and return a set of those.

    Additionally, look at all tokens in the message, and use
    fuzzy string matching to see if they are close enough to match
    with an existing set of hashtags. If so, add those to this set.
    '''
    message_tokens = message.lower().split()
    initial_hashtags = {tag for tag in message_tokens if tag.startswith('#')}
    fuzzed_labels = get_fuzzed_labels(set(message_tokens), HASHTAGS)
    incorrect_labels = {token for token, ref_token in fuzzed_labels if token.startswith('#')}
    corrected_labels = {ref_token for token, ref_token in fuzzed_labels}
    # Prune the misspelled hashtags out of the initial_hashtags set
    hashtags = initial_hashtags - incorrect_labels
    return hashtags | corrected_labels
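
# Illustrative example (hypothetical message; assumes '#debris' is in HASHTAGS
# and no other token happens to fuzzy-match a known hashtag):
#
#   extract_custom_labels('Swerved around #debris and more debri on the road')
#
# keeps the exact hashtag '#debris' and, via fuzzy matching, can also map the
# bare token 'debri' back to '#debris', so the result would be {'#debris'}.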

def extract_triage_bucket(event):
    '''
    Given an event object, attempt to deduce which triage bucket it
    belongs to.

    If zero or more than one bucket tag appears in the message, do not
    return any triage bucket. Otherwise, look up the single bucket tag
    in TRIAGE_BUCKET_MAP and return the corresponding triage bucket.
    '''
    if event.event.type not in ('DISENGAGEMENT', 'NOTE'):
        return None

    buckets = [tag for tag in event.message.split() if tag.startswith('^')]
    if len(buckets) != 1:
        return None
    return TRIAGE_BUCKET_MAP.get(buckets[0], None)

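# Illustrative example (hypothetical messages): a disengagement whose message
# contains exactly one bucket tag, e.g. 'swerved for trash ^debris', resolves
# to 'Debris' via TRIAGE_BUCKET_MAP, while a message with zero or multiple
# '^' tags (e.g. '^debris ^cz') yields None.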

def generate_nogo_labels(nogo):
    '''
    Parse a nogo object to determine if any special labels
    should be added to the jira ticket.
    '''
    labels = []
    if nogo.source == 'COREDASH' and nogo.end_mode == 'RELEASE':
        # This label indicates Teleop resolved this Nogo.
        labels.append('#to_success')
    return labels


def get_run_events(client, event_metadata, meta_id, event_id):
    '''
    For a run, make a dict whose key is the event_type
    and the value is the list of events of that type.

    :param: client: Client to vehicle-events microservice
    :param: event_metadata: Dict where key is event_type
    and value is number of events of that type in run.
    :param: meta_id: Human readable id for run.
    :param: event_id: Optional event UUID (string); if provided, only
    events with that id are returned.

    :return: events: Dict where key is event_type
    and value is list of events of that type.
    '''
    events = {
        'DISENGAGEMENT': [],
        'NOTE': [],
        'COREDUMP': [],
        'NOGO': [],
        'TELEOPS_ENGAGEMENT': [],
        'OFFLINE_NOTE': []
    }
    if 'DISENGAGEMENT' in event_metadata:
        events['DISENGAGEMENT'] = client.disengagement.\
            getDisengagements(runIdentifierList=[meta_id]).result().items

    if 'COREDUMP' in event_metadata:
        events['COREDUMP'] = client.coredump.\
            getCoredumps(runIdentifierList=[meta_id]).result().items

    if 'NOTE' in event_metadata:
        events['NOTE'] = client.note.\
            getNotes(runIdentifierList=[meta_id]).result().items

    if 'NOGO' in event_metadata:
        events['NOGO'] = client.nogo.\
            getNogos(runIdentifierList=[meta_id]).result().items

    if 'TELEOPS_ENGAGEMENT' in event_metadata:
        events['TELEOPS_ENGAGEMENT'] = client.teleops_engagement.\
            getTeleopsEngagements(runIdentifierList=[meta_id]).result().items

    if 'OFFLINE_NOTE' in event_metadata:
        events['OFFLINE_NOTE'] = client.offline_note.\
            getOfflineNotes(runIdentifierList=[meta_id]).result().items

    if event_id:
        for event_type in event_metadata:
            events[event_type] = [e for e in events[event_type]
                                  if e.event.id == event_id]

    return events


def get_event_timestamps(processor, run_events):
    '''
    Given a RosParamsProcessor and the events from
    a run, return a list of the timestamps (seconds)
    of the notes that need tickets made, and a list
    of the timestamps (seconds) of disengagements that
    need tickets made.
    '''
    params_tar_path = processor.download_params_tar()
    params_tar_extract_dir = processor.extract_params_tar(params_tar_path)

    issue_directory = os.path.join(params_tar_extract_dir, ISSUES_SUBDIR)
    if not os.path.exists(issue_directory):
        logger.warning(
            'Issue directory does not exist in extracted ROSParamsTar: %s', issue_directory)
        logger.warning('Logger likely did not shut down properly.')
        logger.warning(
            'Will get timestamps for unedited notes and disengagements.')
        note_timestamps = [
            note.event.timestamp / lib.NANOSECS_IN_SEC for note in run_events['NOTE']]
        disengagement_timestamps = [disengagement.event.timestamp / lib.NANOSECS_IN_SEC
                                    for disengagement in run_events['DISENGAGEMENT']]
    else:
        note_issue_files = processor.get_issue_files(
            issue_directory, NOTE_TYPE, '.txt')
        disengagement_issue_files = processor.get_issue_files(
            issue_directory, DISENGAGEMENT_TYPE, '.txt')
        note_timestamps = processor.get_file_timestamps(note_issue_files)
        disengagement_timestamps = processor.get_file_timestamps(
            disengagement_issue_files)

    note_timestamps += get_offline_note_timestamps(run_events.get('OFFLINE_NOTE', []))
    return note_timestamps, disengagement_timestamps


def get_offline_note_timestamps(offline_notes):
    '''
    Given a list of offline notes from a run,
    return a list of the timestamps (seconds) of
    the offline notes that need tickets made.
    '''
    note_timestamps = [
        note.event.timestamp / lib.NANOSECS_IN_SEC for note in offline_notes]
    return note_timestamps


def get_event_metadata(client, meta_id, event_id):
    '''
    For a given run, parse its event metadata and tabulate how many
    events of each type it has. If event_id is provided, only that
    event is considered.

    Return a dict where the key is the event_type and the value is
    the number of events of that type contained in the run.
    '''
    try:
        if event_id:
            events = client.event.getEvents(eventIDList=[event_id]).result().items
        else:
            events = client.event.getEvents(runIdentifierList=[meta_id]).result().items
    except HTTPNotFound:
        logger.warning('The run {} contains no events!'.format(meta_id))
        return {}

    event_metadata = defaultdict(int)
    for metadata in events:
        event_metadata[metadata.type] += 1
    return dict(event_metadata)


def get_event_issue(client, event_id):
    '''
    Search jira for an issue made for a certain event and by a given
    triager. Together these 2 can uniquely identify a classification.

    :param: client: Jira Client
    :param: event_id: UUID (string) for the event
    '''
    query_string = 'project = TRG and "Vehicle Event ID" ~ {}'.format(
        event_id)
    return client.search_issues(query_string)


def get_report_from_event_triager(client, event_id, triager):
    '''
    Given an event ID and a triager name, search the vehicle-tickets
    service to see whether a report exists. If not,
    return None.

    :param: client: Vehicle-Tickets client
    :param: event_id: Event UUID
    :param: triager: Name of the Triager
    '''
    try:
        result = client.reports.getReports(
            eventIdList=[event_id],
            triagerList=[triager]).result()
        return result.items[0]
    except HTTPNotFound:
        return None

def no_corrupt_chum_in_traffic_light_states_zrn(meta_id, dry_run):
    '''
    Return True if every message on TRAFFIC_LIGHT_STATES_ZRN_TOPIC for the
    run can be loaded, False if any message is corrupt.
    '''
    store = chumpy.Store.createDefault()
    run_range = chumpy.Range.fromRun(meta_id)
    run_range.topics = {TRAFFIC_LIGHT_STATES_ZRN_TOPIC}
    reader = chumpy.Reader.create(store, run_range)
    try:
        # Read the messages only to surface MessageLoadErrors;
        # the message contents are not used.
        for _ in reader:
            pass
    except chumpy.MessageLoadError as e:
        log_chum_error(e, meta_id, logger.warning, dry_run)
        return False
    return True

def run_classifiers(ticket_client, event, triagers):
    '''
    Run an event through all of the non-vanilla triagers' classifiers.

    :param: ticket_client: Client to vehicle-tickets microservice
    :param: event: Event Object
    :param: triagers: Dict of Triager Names to Initialized Triagers.
        Should always include the Vanilla Triager.
    :return: Dictionary mapping a triager name to the Report object.
    '''
    # Run the classifier on all non-vanilla triagers
    classifier_results = {triager_name: triager.classify(event) for triager_name, triager in triagers.items()
                          if triager_name != VANILLA_TRIAGER}
    activated_triagers = [triager_name for triager_name in triagers.keys()
                          if triager_name != VANILLA_TRIAGER and classifier_results[triager_name].result is True]

    # Vanilla Triager's classifier runs only after all others are done.
    classifier_results[VANILLA_TRIAGER] = triagers[
        VANILLA_TRIAGER].classify(event, activated_triagers)

    # Now store the output of each triager's classifier in a report object
    reports = {}
    for triager_name, classifier_output in classifier_results.items():
        reports[triager_name] = ticket_client.get_model('Report')(
                      event_id=event.event.id,
                      triager=triager_name,
                      result=classifier_output.result,
                      triager_input=json.dumps(classifier_output.input))

    return reports


def process_reports(event, reports, triagers):
    '''
    For the reports that resulted in True, fill in other
    fields in the report objects.

    :param: event: Event object
    :param: reports: Dict of triager_name to report objects
    :param: triagers: Dict mapping triager_name to triager
    :return: Dict mapping triager_name to report object
    '''
    for triager_name, report in reports.items():
        if report.result:
            analyzer_output = triagers[triager_name].process(event, report)
            report.assignee = analyzer_output.assignee
            report.triager_output = json.dumps(analyzer_output.output)
    return reports


def _override_existing_report(ticket_client, reportId, new_report):
    '''
    Function that attempts to modify an existing report on the Reports
    table with the latest report object generated by a triager.

    :param: ticket_client: Client to vehicle-tickets microservice
    :param: reportId: UUID of the existing report object
    :param: new_report: Report object with updated values.
    '''
    reportPatch = ticket_client.get_model('ReportPatch')(result=new_report.result,
                       assignee=new_report.assignee,
                       triager_input=new_report.triager_input,
                       triager_output=new_report.triager_output)
    try:
        modified_report = ticket_client.reports.modifyReport(
            reportId=reportId, reportPatch=reportPatch).result()
    except Exception as e:
        raise VehicleTicketsError(
            'Could not override an existing report!: {}'.format(str(e)))


def upload_reports(ticket_client, reports, force=False):
    '''
    :param: ticket_client: Client to vehicle-tickets microservice
    :param: reports: List of report objects
    :param: force: Boolean on whether we want to overwrite existing report objects
    :return: List of desired report objects
    '''
    desired_reports = []
    for report in reports:
        try:
            result = ticket_client.reports.addReport(report=report).result()
            desired_reports.append(report)
        except HTTPConflict as e:
            logger.warn("Report for event {} by triager {} already exists".
                        format(report.event_id, report.triager))
            existing_report = get_report_from_event_triager(
                ticket_client, report.event_id, report.triager)
            if not force:
                desired_reports.append(existing_report)
            else:
                # Override the old report with the new one.
                if not existing_report:
                    logger.warn("Report for event {} and triager {} not found!".
                                format(report.event_id, report.triager))
                    raise VehicleTicketsError(
                        'Cannot get a report that is supposed to exist!: {}'.format(str(e)))

                _override_existing_report(ticket_client, existing_report.id, report)
                desired_reports.append(report)

    return desired_reports


def generate_standard_ticket_title(event):
    '''
    Generate the Title for the ticket based on event information
    '''
    event_time_sec = event.event.timestamp / lib.NANOSECS_IN_SEC
    pst_datetime = lib.pacific_datetime(event_time_sec)
    date = pst_datetime.strftime('%Y-%m-%d')
    time = pst_datetime.strftime('%H:%M:%S')
    title = u''
    if event.event.type == 'NOTE':
        title = u'{}: Note: {}'.format(date, event.message)
    elif event.event.type == 'DISENGAGEMENT':
        title = u'{}: Disengagement: {}'.format(date, event.message)
    elif event.event.type == 'COREDUMP':
        title = u'{} crashed on {} at {}'.\
            format(event.executable, date, time)
    elif event.event.type == 'NOGO':
        title = u'{} at {}: Nogo: Started by {}, Ended by {}'.\
            format(date, time, event.source, event.end_source)
    elif event.event.type == 'TELEOPS_ENGAGEMENT':
        title = u'{}: Teleops Engagement: {}'.format(date, event.message)
    elif event.event.type == 'OFFLINE_NOTE':
        title = u'{}: Offline Note: {}'.format(date, event.message)
    return title


def generate_ticket_labels(event, run, reports, triagers):
    '''
    Generate a list of all the labels for a given TRG ticket.
    '''
    labels = ['from_vehicle']
    labels += generate_run_labels(run)
    labels += generate_event_labels(event)
    labels += generate_triager_labels(event, reports, triagers)
    return labels

def generate_run_labels(run):
    '''
    Given a run object, return a list of labels for a ticket
    :param: run: Run object from Data Catalog
    '''
    labels = []
    if is_point_to_point_run(run):
        labels.append('P2PDC')
    mission_config_mode_res = data_rest_api.get_run_metadata(run.get('data_id'),
                                                             'mission_config_mode')
    if mission_config_mode_res.get('success') and \
            'mission_config_mode' in mission_config_mode_res.get('metadata', {}):
        labels.append('mission_config_mode:' +
                      str(mission_config_mode_res['metadata']['mission_config_mode']))
    else:
        logger.warning('Cannot get mission_config_mode from the run {}'.format(run.get('data_id')))
    return labels

def generate_disengagement_labels(event):
    '''
    :param: event: Disengagement object
    :return: List of labels
    '''
    labels = []
    if event.reason == 'CRITICAL':
        labels.extend(['UNREVIEWED', 'SAFETY_ACTION'])

    # Add Disengagement Reason as a label, but
    # downgrade in special cases
    reason_label = event.reason
    downgrade_tags = {'#good', '#training'}
    message_tags = {tag for tag in event.message.lower().split()
                    if tag.startswith('#')}
    if event.reason not in ('REQUESTED', 'CRITICAL') and \
            any(tag in message_tags for tag in downgrade_tags):
        reason_label = 'REQUESTED'

    if event.is_zfr is True:
        labels.append('zfr-report')

    labels.append(reason_label)
    return labels

def generate_teleops_engagement_labels(event):
    '''
    :param: event: Teleops engagement event
    :return: List of labels
    '''
    labels = ['to-report']
    if event.flag_for_review:
        labels.append('to-report-review')
    return labels

def generate_event_labels(event):
    '''
    Return a list of labels based on event information.
    :param: event: event object
    '''
    labels = ['Event:{}'.format(event.event.type),
              event.event.autonomous_mode,
              event.event.vehicle.replace('_', '-')]
    if event.event.type in ('DISENGAGEMENT', 'NOTE'):
        labels += list(extract_custom_labels(event.message))

    if event.event.type == 'NOGO':
        labels += generate_nogo_labels(event)

    if event.event.type == 'DISENGAGEMENT':
        labels += generate_disengagement_labels(event)

    if event.event.type == 'TELEOPS_ENGAGEMENT':
        labels += generate_teleops_engagement_labels(event)

    geofences = get_event_geofences(event)
    if geofences:
        labels += list(geofences)

    if event.event.latLonCoordinate is not None:
        sun_state = sun_utils.getSunState(event.event.latLonCoordinate[0],
                                          event.event.latLonCoordinate[1],
                                          event.event.timestamp / lib.NANOSECS_IN_SEC)
        labels += [sun_state]

    return labels

def generate_triager_labels(event, reports, triagers):
    '''
    :param: event: event object
    :param: reports: List of all report objects for this event
    :param: triagers: Dict mapping Triager Name to Initialized Triager
    :return: a list of custom labels for the jira ticket
    '''
    labels = []
    active_reports = [report for report in reports if report.result is True]
    # Specially denote if only the Vanilla Triager fired on the event.
    if len(active_reports) == 1 and active_reports[0].triager == VANILLA_TRIAGER:
        labels.append(ONLY_VANILLA)
        # Coredumps that are ONLY_VANILLA need to be deduped.
        if event.event.type == 'COREDUMP':
            labels.append(NOT_DEDUPED)

    for report in active_reports:
        labels.append("Triager:{}".format(report.triager))
        labels += triagers[report.triager].generate_ticket_labels(
            event,
            json.loads(report.triager_input),
            json.loads(report.triager_output))

    # Discard any label > 255 characters since that can't be handled by JIRA.
    final_labels = []
    for label in labels:
        if len(label) > 255:
            logger.warn("Label {} exceeds 255 char and won't be included!".format(label))
        else:
            final_labels.append(label)

    return final_labels


def _generate_mentioned_users(event, color):
    '''
    Generate the Mentioned Users section of the ticket description,
    if any users are mentioned in the event message.
    '''
    body = []
    if event.message:
        users = USER_MENTION_PATTERN.findall(event.message)
        if users:
            body.append(templates.CATEGORY_TEMPLATE.format(
                title='Mentioned Users', hex=color))
            content = '*Users*: ' + ', '.join(['[~' + name + ']'
                                               for name in users])
            body.append(templates.SECTION_TEMPLATE.format(
                section_content=content))
    return '\n\n'.join(body)
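
# Illustrative example (hypothetical username): an event message containing
# '@@jdoe' produces a 'Mentioned Users' section whose content is
# '*Users*: [~jdoe]', i.e. one Jira user mention per '@@'-prefixed token
# found by USER_MENTION_PATTERN.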


def generate_description(event, run, reports, triagers, nogo_for_disengagement, issue_key):
    '''
    Generate the Description for the ticket
    based on event and run information
    '''
    body = []
    # Summary Header (Mandatory)
    color = VANILLA_COLOR
    secondary_color = GRAY_COLOR
    body.append(templates.SUMMARY_TEMPLATE.format(hex=color))
    body.append(lib.generate_persistent_scenario_link(event, color, issue_key))

    for report in reports:
        if report.result is True:
            triager_description = triagers[report.triager].generate_ticket_description(run,
                    event,
                    json.loads(report.triager_input),
                    json.loads(report.triager_output))
            body.append(triager_description)

    # Note Section (only if applicable)
    if event.event.type == 'NOTE':
        body.append(lib.generate_note_description(event, color))

    # Disengagement Section (only if applicable)
    if event.event.type == 'DISENGAGEMENT':
        if nogo_for_disengagement:
            body.append(lib.generate_nogo_for_disengagement_description(nogo_for_disengagement, secondary_color, run))
        body.append(lib.generate_disengagement_description(event, color))

    # Nogo Section (only if applicable)
    if event.event.type == 'NOGO':
        body.append(lib.generate_nogo_description(event, color))

    # Teleops Engagement Section (only if applicable)
    if event.event.type == 'TELEOPS_ENGAGEMENT':
        body.append(lib.generate_teleops_engagement_description(event, color))

    # Offline Note Section (only if applicable)
    if event.event.type == 'OFFLINE_NOTE':
        body.append(lib.generate_offline_note_description(event, color))

    body.append(lib.generate_event_description(run, event, color))
    body.append(lib.generate_run_description(run, event, color))
    body.append(lib.generate_code_decription(run, color))

    # CoreDump Section (only if applicable)
    if event.event.type == 'COREDUMP':
        body.append(lib.generate_coredump_description(event, run, color))

    # Mentioned Users Section, can only occur in events with messages
    if event.event.type in ('NOTE', 'DISENGAGEMENT', 'TELEOPS_ENGAGEMENT'):
        body.append(_generate_mentioned_users(event, color))

    return '\n\n'.join(body)


class RosParamsProcessor(object):

    '''
    RosParamsProcessor

    Object encapsulating all the functionality around RosParamsTar
    processing.
    '''
    artifact_download_dir = '/tmp/rosparams_cache/artifacts'

    def __init__(self, meta_id):
        '''
        RosParamsProcessor constructor.
        string meta_id: The human readable id describing a run.
        '''
        self._meta_id = meta_id
        self._run_files_memo = None

    @property
    def run_files(self):
        '''
        Memoized copy of Data Catalog `run_files` JSON response dict.
        '''
        if self._run_files_memo is None:
            self._run_files_memo = data_rest_api.get_run_files(self._meta_id)
        if not self._run_files_memo['success']:
            raise DataCatalogQueryError(
                'Could not retrieve run files for {}'.format(self._meta_id))
        return self._run_files_memo['files']

    def artifact_download_subdir(self, subdir=None):
        '''
        Path where Data Catalog artifacts should be downloaded, with the given
        subdirectory.
        '''
        if subdir:
            return os.path.join(self.artifact_download_dir, self._meta_id, subdir)
        return os.path.join(self.artifact_download_dir, self._meta_id)

    def get_issue_files(self, issues_directory, target_str, extension):
        '''
        Given a path to the issues_directory, return the file paths
        to all of the on-vehicle issue files that end with
        extension and have the target_str in their name.
        '''
        return [
            os.path.join(issues_directory, file)
            for file in os.listdir(issues_directory)
            if file.endswith(extension) and target_str in file
        ]

    def _fetch_artifacts(self, artifacts, subdir=None):
        '''
        Downloads the specified artifacts to the given `subdir`.
        See: `artifact_download_subdir`
        '''
        dest = self.artifact_download_subdir(subdir=subdir)
        mkdir_p_public(dest)
        artifact_names = [os.path.basename(artifact['name'])
                          for artifact in artifacts]
        return [fetch_run_log_by_name(self._meta_id, artifact_name, dest)
                for artifact_name in artifact_names
                ]

    def download_params_tar(self):
        '''
        Downloads the ROSParamsTar to the given `artifact_download_dir`.
        See: `_fetch_artifacts`
        '''
        params_tars = [
            f for f in self.run_files if f['type'] == 'ROSParamsTar']
        if not params_tars:
            raise RunFileIntegrityError(
                'Could not download ROSParamsTar for run with data_id: %s' % self._meta_id)
        if len(params_tars) > 1:
            raise RunFileIntegrityError('Found too many [%d] ROSParamsTar artifacts for run with data_id: %s'
                                        % (len(params_tars), self._meta_id))
        return self._fetch_artifacts(params_tars)[0]

    def extract_params_tar(self, params_tar_path):
        '''
        Extracts the downloaded ROSParamsTar to the `PARAMS_EXTRACT_SUBDIR`.
        '''
        extract_dir = self.artifact_download_subdir(PARAMS_EXTRACT_SUBDIR)
        mkdir_p_public(extract_dir)
        logger.info('Extracting ROSParamsTar to: %s', extract_dir)
        with tarfile.open(params_tar_path) as tar_file:
            tar_file.extractall(extract_dir)
        make_file_public_recursive(extract_dir)
        return extract_dir

    def get_file_timestamps(self, file_paths):
        '''
        Given a list of filepaths to edited notes or to edited
        disengagements from a run, return the timestamps of the
        notes or disengagements by analyzing the names of the files.
        '''
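        # Illustrative example (hypothetical file name): for a path such as
        # '.../issues/note_1612345678.25.txt', splitext() drops '.txt', the
        # remaining name is split on '_', and the trailing token gives the
        # timestamp 1612345678.25 (seconds).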
        timestamps = []
        for file in file_paths:
            name, extension = os.path.splitext(os.path.basename(file))
            tokens = name.split('_')
            timestamps.append(float(tokens[-1]))
        return timestamps


def notify_slack(run_meta_id, jira_id, event_title):
    '''
    Post a Slack notification about the newly created Jira ticket when the
    event title contains any of the SLACK_KEYWORDS tokens.
    '''
    words = set(event_title.lower().split())
    for kw, channel in SLACK_KEYWORDS.items():
        if kw in words:
            notification = textwrap.dedent('''\
                Jira ticket created for {run_meta_id}:
                https://jira.zooxlabs.com/browse/{jira_id}
                {event_title}
            ''').format(
                run_meta_id=run_meta_id,
                jira_id=jira_id,
                event_title=event_title,
            )
            request_body = {'channel': channel, 'text': notification}
            response = requests.post(
                'https://slack.com/api/chat.postMessage',
                json=request_body,
                headers={
                    'Authorization': 'Bearer {}'.format(SLACK_OAUTH_TOKEN)
                },
            )
            response.raise_for_status()

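# Illustrative example: notify_slack only posts when a SLACK_KEYWORDS key
# appears as a whole whitespace-separated token in the event title, e.g. a
# title containing '@@carta' notifies the 'zigalert-ops' channel, whereas
# '@@cartax' would not match.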

def extract_failed_line(coredump):
    '''
    Extract the file:line of the top stack frame from a coredump backtrace.
    Returns None if the backtrace does not match FAILED_LINE_REGEXP.
    '''
    match = FAILED_LINE_REGEXP.search(coredump.backtrace)
    if match:
        return match.group(1)
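
# Illustrative example (hypothetical backtrace line): given a frame such as
#   '#0 0x00007f... in planner::Foo::bar() at planner/foo.cc:123'
# FAILED_LINE_REGEXP captures 'planner/foo.cc:123', which is the file:line
# key used below to build and find aggregated meta coredump tickets.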


def get_meta_summary(release_version, line):
    '''
    Make a summary (title) for aggregated ticket

    :param: release_version
    :param: line: file:line from the backtrace
    '''
    return "[Meta][Meta_core_dump][{}]{}".format(release_version, line)


def find_meta_issues(jira_client, release_version, line):
    '''
    Search for meta coredump jira ticket for given release version and file:line

    :param: jira_client: Jira Client
    :param: release_version: release version
    :param: line: file:line from the backtrace
    '''
    query_string = "project = TRG AND summary ~ \"Meta\" "\
                   "AND summary ~ \"Meta_core_dump\" "\
                   "AND summary ~ \"{}\" "\
                   "AND summary ~ \"{}\" ".format(release_version, line)
    return jira_client.search_issues(query_string)
