refactorings

This commit is contained in:
Jamie Hardt
2021-06-06 16:28:44 -07:00
parent 80305f6098
commit 898fd96808
3 changed files with 63 additions and 39 deletions

View File

@@ -75,8 +75,8 @@ def main():
         sys.exit(22)
     try:
-        output_format = options.output_format
-        convert(input_file=args[1], output_format=output_format, warnings=options.warnings)
+        major_mode = options.output_format
+        convert(input_file=args[1], major_mode=major_mode, warnings=options.warnings)
     except FileNotFoundError as e:
         print_fatal_error("Error trying to read input file")

View File

@@ -7,7 +7,7 @@ import csv
 from typing import List
 import ptulsconv
-from .docparser.adr_entity import make_entity
+from .docparser.adr_entity import make_entity, GenericEvent, make_entities
 from .reporting import print_section_header_style, print_status_style, print_warning
 from .validations import *
@@ -117,23 +117,23 @@ def create_adr_reports(lines: List[ADRLine], tc_display_format: TimecodeFormat,
     output_talent_sides(lines, tc_display_format=tc_display_format)

-def parse_text_export(file):
-    ast = ptulsconv.protools_text_export_grammar.parse(file.read())
-    dict_parser = ptulsconv.DictionaryParserVisitor()
-    parsed = dict_parser.visit(ast)
-    print_status_style('Session title: %s' % parsed['header']['session_name'])
-    print_status_style('Session timecode format: %f' % parsed['header']['timecode_format'])
-    print_status_style('Fount %i tracks' % len(parsed['tracks']))
-    print_status_style('Found %i markers' % len(parsed['markers']))
-    return parsed
+# def parse_text_export(file):
+#     ast = ptulsconv.protools_text_export_grammar.parse(file.read())
+#     dict_parser = ptulsconv.DictionaryParserVisitor()
+#     parsed = dict_parser.visit(ast)
+#     print_status_style('Session title: %s' % parsed['header']['session_name'])
+#     print_status_style('Session timecode format: %f' % parsed['header']['timecode_format'])
+#     print_status_style('Fount %i tracks' % len(parsed['tracks']))
+#     print_status_style('Found %i markers' % len(parsed['markers']))
+#     return parsed

-def convert(input_file, output_format='fmpxml', output=sys.stdout, warnings=True):
+def convert(input_file, major_mode='fmpxml', output=sys.stdout, warnings=True):
     session = parse_document(input_file)
     session_tc_format = session.header.timecode_format

-    if output_format == 'raw':
+    if major_mode == 'raw':
         output.write(MyEncoder().encode(session))
     else:
@@ -141,38 +141,45 @@ def convert(input_file, output_format='fmpxml', output=sys.stdout, warnings=True
         compiler.session = session
         compiled_events = list(compiler.compile_events())

-        # TODO: Breakdown by titles
-        if output_format == 'tagged':
+        if major_mode == 'tagged':
             output.write(MyEncoder().encode(compiled_events))
         else:
-            events = list(map(make_entity, compiled_events))
-            lines = [event for event in events if isinstance(event, ADRLine)]
+            generic_events, adr_lines = make_entities(compiled_events)
+
+            # TODO: Breakdown by titles
+            titles = set([x.title for x in (generic_events + adr_lines)])
+            assert len(titles) == 1, "Multiple titles per export is not supported"
+            print(titles)

             if warnings:
-                for warning in chain(validate_unique_field(lines,
-                                                           field='cue_number',
-                                                           scope='title'),
-                                     validate_non_empty_field(lines,
-                                                              field='cue_number'),
-                                     validate_non_empty_field(lines,
-                                                              field='character_id'),
-                                     validate_non_empty_field(lines,
-                                                              field='title'),
-                                     validate_dependent_value(lines,
-                                                              key_field='character_id',
-                                                              dependent_field='character_name'),
-                                     validate_dependent_value(lines,
-                                                              key_field='character_id',
-                                                              dependent_field='actor_name')):
-                    print_warning(warning.report_message())
+                perform_adr_validations(adr_lines)

-            if output_format == 'doc':
+            if major_mode == 'doc':
                 reels = sorted([r for r in compiler.compile_all_time_spans() if r[0] == 'Reel'],
                                key=lambda x: x[2])

-                create_adr_reports(lines, tc_display_format=session_tc_format,
+                create_adr_reports(adr_lines,
+                                   tc_display_format=session_tc_format,
                                    reel_list=sorted(reels))
+
+
+def perform_adr_validations(lines):
+    for warning in chain(validate_unique_field(lines,
+                                               field='cue_number',
+                                               scope='title'),
+                         validate_non_empty_field(lines,
+                                                  field='cue_number'),
+                         validate_non_empty_field(lines,
+                                                  field='character_id'),
+                         validate_non_empty_field(lines,
+                                                  field='title'),
+                         validate_dependent_value(lines,
+                                                  key_field='character_id',
+                                                  dependent_field='character_name'),
+                         validate_dependent_value(lines,
+                                                  key_field='character_id',
+                                                  dependent_field='actor_name')):
+        print_warning(warning.report_message())

View File

@@ -1,10 +1,27 @@
 from ptulsconv.docparser.tag_compiler import Event
-from typing import Optional
+from typing import Optional, List, Tuple
 from dataclasses import dataclass
 from fractions import Fraction
 from ptulsconv.docparser.tag_mapping import TagMapping
+
+
+def make_entities(from_events: List[Event]) -> Tuple[List['GenericEvent'], List['ADRLine']]:
+    generic_events = list()
+    adr_lines = list()
+    for event in from_events:
+        result = make_entity(event)
+        if type(result) is ADRLine:
+            result: ADRLine
+            adr_lines.append(result)
+        elif type(result) is GenericEvent:
+            result: GenericEvent
+            generic_events.append(result)
+
+    return generic_events, adr_lines
+

 def make_entity(from_event: Event) -> Optional[object]:
     instance = GenericEvent
     tag_map = GenericEvent.tag_mapping