Mirror of https://github.com/iluvcapra/ptulsconv.git, synced 2025-12-31 08:50:48 +00:00
Reorganized commands a little
@@ -1,4 +1,4 @@
from ptulsconv.commands import convert, dump_field_map, dump_xform_options
from ptulsconv.commands import convert, dump_field_map
from ptulsconv import __name__, __version__, __author__
from optparse import OptionParser, OptionGroup
from .reporting import print_status_style, print_banner_style, print_section_header_style, print_fatal_error
@@ -37,8 +37,8 @@ def main():
                      '`fmpxml`.)')

    warn_options = OptionGroup(title="Warning and Validation Options", parser=parser)
    warn_options.add_option('-w', action='store_true', dest='warnings',
                            help='Generate warnings for common errors (missing code numbers etc.)')
    warn_options.add_option('-W', action='store_false', dest='warnings', default=True,
                            help='Suppress warnings for common errors (missing code numbers etc.)')

    parser.add_option_group(warn_options)

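For readers unfamiliar with the pattern in the hunk above: -w and -W are two optparse switches writing to the same dest, so the last one given on the command line wins. A minimal, self-contained sketch of that idiom (the bare parser and shortened help strings here are simplifications, not taken verbatim from the commit):

from optparse import OptionParser, OptionGroup

parser = OptionParser()
warn_options = OptionGroup(parser, "Warning and Validation Options")
# Both switches write to options.warnings; -w enables, -W disables.
warn_options.add_option('-w', action='store_true', dest='warnings',
                        help='Generate warnings for common errors')
warn_options.add_option('-W', action='store_false', dest='warnings', default=True,
                        help='Suppress warnings for common errors')
parser.add_option_group(warn_options)

options, _ = parser.parse_args(['-W'])
print(options.warnings)  # False; with no switch given, the default of True applies
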
@@ -1,22 +1,19 @@
import io
import json
import os
import os.path
import pathlib
import subprocess

import sys
from itertools import chain
from xml.etree.ElementTree import TreeBuilder, tostring

import ptulsconv
from .reporting import print_section_header_style, print_status_style, print_warning
from .validations import *
from .xml.common import fmp_dump, fmp_transformed_dump

from ptulsconv.pdf.supervisor_1pg import output_report as output_supervisor_1pg
from ptulsconv.pdf.line_count import output_report as output_line_count
from ptulsconv.pdf.talent_sides import output_report as output_talent_sides
from ptulsconv.pdf.summary_log import output_report as output_summary


# field_map maps tags in the text export to fields in FMPXMLRESULT
# - tuple field 0 is a list of tags, the first tag with contents will be used as source
# - tuple field 1 is the field in FMPXMLRESULT
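# - tuple field 2 is the Python type the value is coerced to; int and float entries become NUMBER columns in the XML, all others TEXT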
@@ -55,83 +52,6 @@ adr_field_map = ((['Title', 'PT.Session.Name'], 'Title', str),
                 (['Movie.Start_Offset_Seconds'], 'Movie Seconds', float),
                 )


def normalize_record_keys(records):
    for record in records['events']:
        for field in adr_field_map:
            spot_keys = field[0]
            output_key = field[1]
            field_type = field[2]
            for attempt_key in spot_keys:
                if attempt_key in record.keys():
                    record[output_key] = field_type(record[attempt_key])

    return records

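To make the key normalization concrete, here is a hedged sketch of what one field-map entry does to a record; the entry is taken from adr_field_map above, the event dict itself is made up:

record = {'Movie.Start_Offset_Seconds': '12.5'}
# The entry (['Movie.Start_Offset_Seconds'], 'Movie Seconds', float) takes the first
# source tag that is present and stores the coerced value under the output key:
record['Movie Seconds'] = float(record['Movie.Start_Offset_Seconds'])
print(record)  # {'Movie.Start_Offset_Seconds': '12.5', 'Movie Seconds': 12.5}
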
def fmp_dump(data, input_file_name, output):
    doc = TreeBuilder(element_factory=None)

    doc.start('FMPXMLRESULT', {'xmlns': 'http://www.filemaker.com/fmpxmlresult'})

    doc.start('ERRORCODE', {})
    doc.data('0')
    doc.end('ERRORCODE')

    doc.start('PRODUCT', {'NAME': ptulsconv.__name__, 'VERSION': ptulsconv.__version__})
    doc.end('PRODUCT')

    doc.start('DATABASE', {'DATEFORMAT': 'MM/dd/yy', 'LAYOUT': 'summary', 'TIMEFORMAT': 'hh:mm:ss',
                           'RECORDS': str(len(data['events'])), 'NAME': os.path.basename(input_file_name)})
    doc.end('DATABASE')

    doc.start('METADATA', {})
    for field in adr_field_map:
        tp = field[2]
        ft = 'TEXT'
        if tp is int or tp is float:
            ft = 'NUMBER'

        doc.start('FIELD', {'EMPTYOK': 'YES', 'MAXREPEAT': '1', 'NAME': field[1], 'TYPE': ft})
        doc.end('FIELD')
    doc.end('METADATA')

    doc.start('RESULTSET', {'FOUND': str(len(data['events']))})
    for event in data['events']:
        doc.start('ROW', {})
        for field in adr_field_map:
            doc.start('COL', {})
            doc.start('DATA', {})
            for key_attempt in field[0]:
                if key_attempt in event.keys():
                    doc.data(str(event[key_attempt]))
                    break
            doc.end('DATA')
            doc.end('COL')
        doc.end('ROW')
    doc.end('RESULTSET')

    doc.end('FMPXMLRESULT')
    docelem = doc.close()
    xmlstr = tostring(docelem, encoding='unicode', method='xml')
    output.write(xmlstr)
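For orientation, the TreeBuilder calls above assemble a document of roughly this shape (an illustrative outline reconstructed from the start/end calls, with attribute values elided):

<FMPXMLRESULT xmlns="http://www.filemaker.com/fmpxmlresult">
  <ERRORCODE>0</ERRORCODE>
  <PRODUCT NAME="..." VERSION="..."/>
  <DATABASE DATEFORMAT="MM/dd/yy" LAYOUT="summary" TIMEFORMAT="hh:mm:ss" RECORDS="..." NAME="..."/>
  <METADATA>
    <FIELD EMPTYOK="YES" MAXREPEAT="1" NAME="..." TYPE="TEXT or NUMBER"/>  <!-- one FIELD per field-map entry -->
  </METADATA>
  <RESULTSET FOUND="...">
    <ROW><COL><DATA>...</DATA></COL></ROW>  <!-- one ROW per event, one COL per field -->
  </RESULTSET>
</FMPXMLRESULT>
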

import glob

xslt_path = os.path.join(pathlib.Path(__file__).parent.absolute(), 'xslt')

def xform_options():
    return glob.glob(os.path.join(xslt_path, "*.xsl"))

def dump_xform_options(output=sys.stdout):
    print("# Available transforms:", file=output)
    print("# Transform dir: %s" % (xslt_path), file=output)
    for f in xform_options():
        base = os.path.basename(f)
        name, _ = os.path.splitext(base)
        print("# " + name, file=output)

def dump_field_map(field_map_name, output=sys.stdout):
    output.write("# Map of Tag fields to XML output columns\n")
    output.write("# (in order of precedence)\n")
@@ -149,26 +69,22 @@ def dump_field_map(field_map_name, output=sys.stdout):
        for tag in field[0]:
            output.write("# %-27s-> %-20s | %-8s| %-7i\n" % (tag[:27], field[1][:20], field[2].__name__, n + 1))

def normalize_record_keys(records):
    for record in records['events']:
        for field in adr_field_map:
            spot_keys = field[0]
            output_key = field[1]
            field_type = field[2]
            for attempt_key in spot_keys:
                if attempt_key in record.keys():
                    record[output_key] = field_type(record[attempt_key])

def fmp_transformed_dump(data, input_file, xsl_name, output):
    pipe = io.StringIO()
    print_status_style("Generating base XML")
    fmp_dump(data, input_file, pipe)

    strdata = pipe.getvalue()
    print_status_style("Base XML size %i" % (len(strdata)))

    print_status_style("Running xsltproc")

    xsl_path = os.path.join(pathlib.Path(__file__).parent.absolute(), 'xslt', xsl_name + ".xsl")
    print_status_style("Using xsl: %s" % (xsl_path))
    subprocess.run(['xsltproc', xsl_path, '-'], input=strdata, text=True,
                   stdout=output, shell=False, check=True)
    return records


def convert(input_file, output_format='fmpxml', start=None, end=None, select_reel=None,
            progress=False, include_muted=False, xsl=None,
            output=sys.stdout, log_output=sys.stderr, warnings=False, spelling=False):
            output=sys.stdout, log_output=sys.stderr, warnings=True, spelling=False):
    with open(input_file, 'r') as file:
        print_section_header_style('Parsing')
        ast = ptulsconv.protools_text_export_grammar.parse(file.read())
@@ -203,12 +119,12 @@ def convert(input_file, output_format='fmpxml', start=None, end=None, select_ree
        reel_xform = ptulsconv.transformations.SelectReel(reel_num=select_reel)
        parsed = reel_xform.transform(parsed)

    parsed = normalize_record_keys(parsed)

    if warnings:
        for warning in chain(validate_unique_field(parsed, field='QN'),
                             validate_non_empty_field(parsed, field='QN'),
                             validate_non_empty_field(parsed, field='CN'),
                             validate_non_empty_field(parsed, field='Char'),
                             validate_non_empty_field(parsed, field='Title'),
                             validate_dependent_value(parsed, key_field='CN',
                                                      dependent_field='Char'),
@@ -221,19 +137,18 @@ def convert(input_file, output_format='fmpxml', start=None, end=None, select_ree
            print_warning(warning.report_message())

    if output_format == 'json':
        json.dump(normalize_record_keys(parsed), output)
        json.dump(parsed, output)
    elif output_format == 'full':
        print("Sorry, the `full` output type is not yet supported.")
        normalized_records = normalize_record_keys(parsed)

        output_supervisor_1pg(normalized_records)
        output_talent_sides(normalized_records)
        output_line_count(normalized_records)
        output_summary(normalized_records)
        output_supervisor_1pg(parsed)
        output_talent_sides(parsed)
        output_line_count(parsed)
        output_summary(parsed)

    elif output_format == 'fmpxml':
        if xsl is None:
            fmp_dump(parsed, input_file, output)
            fmp_dump(parsed, input_file, output, adr_field_map)
        else:
            print_section_header_style("Performing XSL Translation")
            print_status_style("Using builtin translation: %s" % (xsl))

@@ -11,7 +11,7 @@ class ValidationError:
        return f"{self.message}: event at {self.event['PT.Clip.Start']} on track {self.event['PT.Track.Name']}"

def validate_unique_count(input_dict, field='Title', count=1):
    values = set(list(map(lambda e: e[field], input_dict['events'])))
    values = set(list(map(lambda e: e.get(field, None), input_dict['events'])))
    if len(values) > count:
        yield ValidationError(message="Field {} has too many values (max={}): {}".format(field, count, values))

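Since validate_unique_count is a generator, callers iterate it for errors; a small hedged sketch with made-up events (it relies on the .get() form above, which tolerates events missing the field):

events = {'events': [{'Title': 'Reel 1'}, {'Title': 'Reel 2'}, {}]}
for err in validate_unique_count(events, field='Title', count=1):
    print(err.message)  # e.g. "Field Title has too many values (max=1): {'Reel 1', 'Reel 2', None}"
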
ptulsconv/xml/common.py (Normal file, 94 lines)
@@ -0,0 +1,94 @@
import os
import os.path
import pathlib
import subprocess
import sys

import ptulsconv as ptulsconv

from xml.etree.ElementTree import TreeBuilder, tostring

import io


def fmp_dump(data, input_file_name, output, adr_field_map):
    doc = TreeBuilder(element_factory=None)

    doc.start('FMPXMLRESULT', {'xmlns': 'http://www.filemaker.com/fmpxmlresult'})

    doc.start('ERRORCODE', {})
    doc.data('0')
    doc.end('ERRORCODE')

    doc.start('PRODUCT', {'NAME': ptulsconv.__name__, 'VERSION': ptulsconv.__version__})
    doc.end('PRODUCT')

    doc.start('DATABASE', {'DATEFORMAT': 'MM/dd/yy', 'LAYOUT': 'summary', 'TIMEFORMAT': 'hh:mm:ss',
                           'RECORDS': str(len(data['events'])), 'NAME': os.path.basename(input_file_name)})
    doc.end('DATABASE')

    doc.start('METADATA', {})
    for field in adr_field_map:
        tp = field[2]
        ft = 'TEXT'
        if tp is int or tp is float:
            ft = 'NUMBER'

        doc.start('FIELD', {'EMPTYOK': 'YES', 'MAXREPEAT': '1', 'NAME': field[1], 'TYPE': ft})
        doc.end('FIELD')
    doc.end('METADATA')

    doc.start('RESULTSET', {'FOUND': str(len(data['events']))})
    for event in data['events']:
        doc.start('ROW', {})
        for field in adr_field_map:
            doc.start('COL', {})
            doc.start('DATA', {})
            for key_attempt in field[0]:
                if key_attempt in event.keys():
                    doc.data(str(event[key_attempt]))
                    break
            doc.end('DATA')
            doc.end('COL')
        doc.end('ROW')
    doc.end('RESULTSET')

    doc.end('FMPXMLRESULT')
    docelem = doc.close()
    xmlstr = tostring(docelem, encoding='unicode', method='xml')
    output.write(xmlstr)
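A hedged usage sketch of the new four-argument signature (the one-entry field map and the event data are minimal stand-ins, not taken from the commit):

import io
from ptulsconv.xml.common import fmp_dump

# A single-entry field map in the (source tags, output column, type) shape used by commands.py:
field_map = ((['Title', 'PT.Session.Name'], 'Title', str),)
data = {'events': [{'Title': 'Spot 1'}]}

buf = io.StringIO()
fmp_dump(data, 'session.txt', buf, field_map)
print(buf.getvalue()[:60])  # begins with the FMPXMLRESULT root element
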

import glob

xslt_path = os.path.join(pathlib.Path(__file__).parent.absolute(), 'xslt')

def xform_options():
    return glob.glob(os.path.join(xslt_path, "*.xsl"))

def dump_xform_options(output=sys.stdout):
    print("# Available transforms:", file=output)
    print("# Transform dir: %s" % (xslt_path), file=output)
    for f in xform_options():
        base = os.path.basename(f)
        name, _ = os.path.splitext(base)
        print("# " + name, file=output)

def fmp_transformed_dump(data, input_file, xsl_name, output):
    from ptulsconv.reporting import print_status_style

    pipe = io.StringIO()

    print_status_style("Generating base XML")
    fmp_dump(data, input_file, pipe)

    strdata = pipe.getvalue()
    print_status_style("Base XML size %i" % (len(strdata)))

    print_status_style("Running xsltproc")

    xsl_path = os.path.join(pathlib.Path(__file__).parent.absolute(), 'xslt', xsl_name + ".xsl")
    print_status_style("Using xsl: %s" % (xsl_path))
    subprocess.run(['xsltproc', xsl_path, '-'], input=strdata, text=True,
                   stdout=output, shell=False, check=True)
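    # Note: the call above is equivalent to piping the generated FMPXMLRESULT document
    # into `xsltproc <stylesheet>.xsl -`; the trailing '-' makes xsltproc read the XML from stdin.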