Mirror of https://github.com/iluvcapra/ptulsconv.git (synced 2025-12-31 08:50:48 +00:00)
Implementing validation feature
@@ -2,6 +2,6 @@ from .ptuls_grammar import protools_text_export_grammar
 from .ptuls_parser_visitor import DictionaryParserVisitor
 from .transformations import TimecodeInterpreter
 
-__version__ = '0.5.3'
+__version__ = '0.6.0'
 __author__ = 'Jamie Hardt'
 __license__ = 'MIT'

@@ -29,6 +29,15 @@ def main():
 
     parser.add_option_group(filter_opts)
 
+    warn_options = OptionGroup(title="Warning and Validation Options", parser=parser)
+    warn_options.add_option('-W', action='store_true', dest='warnings',
+                            help='Generate warnings for common errors (missing code numbers etc.)')
+
+    warn_options.add_option('-S', action='store_true', dest='spelling',
+                            help='Check spelling and warn on misspellings.')
+
+    parser.add_option_group(warn_options)
+
     output_opts = OptionGroup(title="Output Options", parser=parser)
     output_opts.add_option('--json', default=False, action='store_true', dest='write_json',
                            help='Output a JSON document instead of XML. If this option is enabled, --xform will have '

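The two new switches are ordinary optparse store_true flags, so after parse_args() they surface on the options object as options.warnings and options.spelling. A minimal sketch of that round trip (not taken from the commit; the sample argument list is invented and the explicit defaults are added here for clarity):

    from optparse import OptionParser, OptionGroup

    parser = OptionParser()
    warn_options = OptionGroup(parser, "Warning and Validation Options")
    warn_options.add_option('-W', action='store_true', dest='warnings', default=False)
    warn_options.add_option('-S', action='store_true', dest='spelling', default=False)
    parser.add_option_group(warn_options)

    # Simulate argv after the program name.
    options, args = parser.parse_args(['-W', 'session.txt'])
    # options.warnings -> True, options.spelling -> False, args -> ['session.txt']
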
@@ -90,7 +99,8 @@ def main():
 
         convert(input_file=args[1], output_format=output_format, start=options.in_time, end=options.out_time,
                 include_muted=options.include_muted, xsl=options.xslt, select_reel=options.select_reel,
-                progress=False, output=sys.stdout, log_output=sys.stderr)
+                progress=False, output=sys.stdout, log_output=sys.stderr,
+                warnings=options.warnings, spelling=options.spelling)
     except FileNotFoundError as e:
         print_fatal_error("Error trying to read input file")
         raise e

@@ -7,8 +7,10 @@ from xml.etree.ElementTree import TreeBuilder, tostring
 import subprocess
 import pathlib
 import ptulsconv
+from itertools import chain
 
-from .reporting import print_section_header_style, print_status_style
+from .reporting import print_section_header_style, print_status_style, print_warning
+from .validations import *
 
 # field_map maps tags in the text export to fields in FMPXMLRESULT
 # - tuple field 0 is a list of tags, the first tag with contents will be used as source

@@ -143,12 +145,12 @@ def fmp_transformed_dump(data, input_file, xsl_name, output):
     xsl_path = os.path.join(pathlib.Path(__file__).parent.absolute(), 'xslt', xsl_name + ".xsl")
     print_status_style("Using xsl: %s" % (xsl_path))
     subprocess.run(['xsltproc', xsl_path, '-'], input=strdata, text=True,
                    stdout=output, shell=False, check=True)
 
 
 def convert(input_file, output_format='fmpxml', start=None, end=None, select_reel=None,
             progress=False, include_muted=False, xsl=None,
-            output=sys.stdout, log_output=sys.stderr):
+            output=sys.stdout, log_output=sys.stderr, warnings=False, spelling=False):
     with open(input_file, 'r') as file:
         print_section_header_style('Parsing')
         ast = ptulsconv.protools_text_export_grammar.parse(file.read())

@@ -184,6 +186,16 @@ def convert(input_file, output_format='fmpxml', start=None, end=None, select_ree
         parsed = reel_xform.transform(parsed)
 
 
+        if warnings:
+            for warning in chain(validate_unique_field(parsed, field='QN'),
+                                 validate_non_empty_field(parsed, field='QN'),
+                                 validate_non_empty_field(parsed, field='CN'),
+                                 validate_non_empty_field(parsed, field='Char'),
+                                 validate_dependent_value(parsed, key_field='CN', dependent_field='Char'),
+                                 validate_dependent_value(parsed, key_field='CN', dependent_field='Actor')):
+
+                print_warning(warning.report_message())
+
         if output_format == 'json':
             json.dump(parsed, output)
         elif output_format == 'fmpxml':

ptulsconv/movie_export.py (new file, 13 lines)
@@ -0,0 +1,13 @@
+import ffmpeg  # ffmpeg-python
+
+
+def create_movie(event):
+    start = event['Movie.Start_Offset_Seconds']
+    duration = event['PT.Clip.Finish_Seconds'] - event['PT.Clip.Start_Seconds']
+    input_movie = event['Movie.Filename']
+    print("Will make movie starting at {}, dur {} from movie {}".format(start, duration, input_movie))
+
+
+def export_movies(events):
+    for event in events:
+        create_movie(event)

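create_movie() is only a stub here; it prints the trim it would perform. For orientation, a sketch of what that trim could look like with the ffmpeg-python package the import comment points at (the output path, helper name, and stream-copy settings are assumptions, not part of the commit):

    import ffmpeg  # pip install ffmpeg-python

    def create_movie_sketch(event, out_path='clip.mov'):
        # Hypothetical: seek to the clip's offset in the source movie and keep
        # only the clip's duration, copying streams without re-encoding.
        start = event['Movie.Start_Offset_Seconds']
        duration = event['PT.Clip.Finish_Seconds'] - event['PT.Clip.Start_Seconds']
        (
            ffmpeg
            .input(event['Movie.Filename'], ss=start)  # input-side -ss: fast seek
            .output(out_path, t=duration, c='copy')    # -t duration, -c copy
            .overwrite_output()
            .run()
        )
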
@@ -12,12 +12,21 @@ def print_section_header_style(str):
     else:
         sys.stderr.write("%s\n\n" % str)
 
 
 def print_status_style(str):
     if sys.stderr.isatty():
         sys.stderr.write("\033[3m - %s\033[0m\n" % str)
     else:
         sys.stderr.write(" - %s\n" % str)
 
 
+def print_warning(warning_string):
+    if sys.stderr.isatty():
+        sys.stderr.write("\033[3m - %s\033[0m\n" % warning_string)
+    else:
+        sys.stderr.write(" - %s\n" % warning_string)
+
+
 def print_advisory_tagging_error(failed_string, position, parent_track_name=None, clip_time=None):
     if sys.stderr.isatty():
         sys.stderr.write("\n")

ptulsconv/validations.py (new file, 54 lines)
@@ -0,0 +1,54 @@
+from dataclasses import dataclass
+from sys import stderr
+
+
+@dataclass
+class ValidationError:
+    message: str
+    event: dict
+
+    def report_message(self):
+        return f"{self.message}: event at {self.event['PT.Clip.Start']} on track {self.event['PT.Track.Name']}"
+
+
+def validate_value(input_dict, key_field, predicate):
+    for event in input_dict['events']:
+        val = event[key_field]
+        if not predicate(val):
+            yield ValidationError(message='Field {} not in range'.format(key_field),
+                                  event=event)
+
+
+def validate_unique_field(input_dict, field='QN'):
+    values = set()
+    for event in input_dict['events']:
+        if event[field] in values:
+            yield ValidationError(message='Re-used {}'.format(field), event=event)
+        values.add(event[field])
+
+
+def validate_non_empty_field(input_dict, field='QN'):
+    for event in input_dict['events']:
+        if field not in event.keys() or len(event[field]) == 0:
+            yield ValidationError(message='Empty field {}'.format(field), event=event)
+
+
+def validate_dependent_value(input_dict, key_field, dependent_field):
+    """
+    Validates that two events with the same value in `key_field` always have the
+    same value in `dependent_field`.
+    """
+    value_map = dict()
+    for event in input_dict['events']:
+        if key_field not in event.keys():
+            continue
+
+        if event[key_field] not in value_map.keys():
+            value_map[event[key_field]] = event.get(dependent_field, None)
+        else:
+            if value_map[event[key_field]] != event.get(dependent_field, None):
+                yield ValidationError(message='Field {} depends on key field {} (value={}), expected {}, was {}'
+                                      .format(dependent_field, key_field, event[key_field],
+                                              value_map[event[key_field]], event.get(dependent_field, None)),
+                                      event=event)
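Each validator is a generator over input_dict['events'] that yields ValidationError records rather than raising, which is what lets convert() chain several checks and emit one report line per offending event. A self-contained usage sketch; the QN/CN/Char values, timecodes, and track names below are invented:

    from itertools import chain
    from ptulsconv.validations import *  # the module added above

    # Invented data: QN '2' is reused, one Char is empty, and CN '10' maps to
    # two different Char values, so each chained validator yields an error.
    parsed = {'events': [
        {'QN': '1', 'CN': '10', 'Char': 'ALICE', 'PT.Clip.Start': '01:00:00:00', 'PT.Track.Name': 'DX 1'},
        {'QN': '2', 'CN': '10', 'Char': 'BOB',   'PT.Clip.Start': '01:00:05:00', 'PT.Track.Name': 'DX 1'},
        {'QN': '2', 'CN': '11', 'Char': '',      'PT.Clip.Start': '01:00:09:00', 'PT.Track.Name': 'DX 2'},
    ]}

    for warning in chain(validate_unique_field(parsed, field='QN'),
                         validate_non_empty_field(parsed, field='Char'),
                         validate_dependent_value(parsed, key_field='CN', dependent_field='Char')):
        print(warning.report_message())
    # -> Re-used QN: event at 01:00:09:00 on track DX 2
    # -> Empty field Char: event at 01:00:09:00 on track DX 2
    # -> Field Char depends on key field CN (value=10), expected ALICE, was BOB: event at 01:00:05:00 on track DX 1

convert() does the same thing with print_warning() from ptulsconv/reporting.py instead of print(), so warnings land on stderr alongside the other status output.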