Compare commits
8 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 65e352e8a4 | |
| | 38083d8a46 | |
| | ce676dea72 | |
| | 8548cc1f0f | |
| | 63202b53f1 | |
| | 2836a40616 | |
| | 19a8c2b997 | |
| | 217cd87feb | |
.gitignore (vendored, new normal file, 1 line added)
@@ -0,0 +1 @@
+.venv/
@@ -73,7 +73,7 @@ class readable_dir(argparse.Action):
 # check distance values are valid
 class distance_values(argparse.Action):
     def __call__(self, parser, namespace, values, option_string=None):
-        m = re.match('^(\d+)\s?(m|km)$', values)
+        m = re.match(r'^(\d+)\s?(m|km)$', values)
         if m:
             # convert to int in meters
             values = int(m.group(1))
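
Most of the small hunks in this compare only add an `r''` prefix so the regex escapes (`\d`, `\s`, `\.`) stop triggering the invalid-escape-sequence warnings newer Python versions emit for normal string literals; the matched text is unchanged. A minimal standalone sketch of the distance pattern touched above (sample inputs are invented):

```python
import re

# The r'' prefix keeps '\d' and '\s' as regex escapes instead of (invalid)
# string escapes; the match behaviour itself is identical.
distance_re = re.compile(r'^(\d+)\s?(m|km)$')

for value in ('10m', '5 km', '250km', 'ten meters'):
    m = distance_re.match(value)
    print(value, '->', (int(m.group(1)), m.group(2)) if m else 'no match')
```
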
@@ -128,7 +128,7 @@ def reverseGeolocateInit(longitude, latitude):
         'error_message': ''
     }
     # error if long/lat is not valid
-    latlong_re = re.compile('^\d+\.\d+$')
+    latlong_re = re.compile(r'^\d+\.\d+$')
    if not latlong_re.match(str(longitude)) or not latlong_re.match(str(latitude)):
         geolocation['status'] = 'ERROR'
         geolocation['error_message'] = 'Latitude {} or Longitude {} are not valid'.format(latitude, longitude)
@@ -201,7 +201,7 @@ def reverseGeolocateOpenStreetMap(longitude, latitude):
 # dict with location, city, state, country, country code
 # if not fillable, entry is empty
 # SAMPLE: http://maps.googleapis.com/maps/api/geocode/json?latlng=<latitude>,<longitude>&language=<lang>&sensor=false&key=<api key>
-def reverseGeolocateGoogle(longitude, latitude):
+def reverseGeolocateGoogle(longitude, latitude):  # noqa: C901
     # init
     geolocation = reverseGeolocateInit(longitude, latitude)
     temp_geolocation = geolocation.copy()
@@ -238,7 +238,7 @@ def reverseGeolocateGoogle(longitude, latitude):
         'CountryCode': ['country'],
         'Country': ['country'],
         'State': ['administrative_area_level_1', 'administrative_area_level_2'],
-        'City': ['locality'],
+        'City': ['locality', 'administrative_area_level_3'],
         'Location': ['sublocality_level_1', 'sublocality_level_2', 'route'],
     }
     # print("Error: {}".format(response.json()['status']))
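
This hunk lets `City` fall back to `administrative_area_level_3` when the Google response contains no `locality` component. A hedged sketch of how such a type map can be applied to the `address_components` array of a Google reverse-geocoding result; the helper below is illustrative, not the script's own loop, and the short-name choice for the country code is a typical convention rather than something shown in this hunk:

```python
# Illustrative helper: fill each target field from the first address component
# whose "types" list contains a mapped type name, honouring the fallback order.
component_map = {
    'CountryCode': ['country'],
    'Country': ['country'],
    'State': ['administrative_area_level_1', 'administrative_area_level_2'],
    'City': ['locality', 'administrative_area_level_3'],
    'Location': ['sublocality_level_1', 'sublocality_level_2', 'route'],
}


def fill_from_components(address_components):
    out = {}
    for field, wanted_types in component_map.items():
        out[field] = ''
        for wanted in wanted_types:
            match = next((c for c in address_components if wanted in c.get('types', [])), None)
            if match:
                # Google returns long_name and short_name per component; using the
                # short form for the country code is an assumption for this sketch.
                out[field] = match['short_name'] if field == 'CountryCode' else match['long_name']
                break
    return out
```
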
@@ -328,7 +328,7 @@ def convertLongToDMS(lat_long):
 # number used in google/lr internal
 def longLatReg(longitude, latitude):
     # regex
-    latlong_re = re.compile('^(\d+),(\d+\.\d+)([NESW]{1})$')
+    latlong_re = re.compile(r'^(\d+),(\d+\.\d+)([NESW]{1})$')
     # dict for loop
     lat_long = {
         'longitude': longitude,
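
The pattern above parses the "degrees, decimal minutes plus hemisphere" form that XMP/EXIF GPS tags use (for example `35,41.9716N`). A small sketch of the conversion such a match enables; the decimal-degrees helper is an assumption about how the captured groups get used, not code from the script:

```python
import re

latlong_re = re.compile(r'^(\d+),(\d+\.\d+)([NESW]{1})$')


def to_decimal_degrees(value):
    """Convert 'DD,MM.mmmmH' (hemisphere H in NESW) to signed decimal degrees."""
    m = latlong_re.match(value)
    if not m:
        return None
    degrees = int(m.group(1)) + float(m.group(2)) / 60.0
    return -degrees if m.group(3) in ('S', 'W') else degrees


print(to_decimal_degrees('35,41.9716N'))   # ~35.6995
print(to_decimal_degrees('139,45.1234E'))  # ~139.7521
```
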
@@ -520,7 +520,7 @@ def formatLen(string, length):
 # RETURN: number found in the BK string or 0 for none
 # DESC : gets the BK number for sorting in the file list
 def fileSortNumber(file):
-    m = re.match('.*\.BK\.(\d+)\.xmp$', file)
+    m = re.match(r'.*\.BK\.(\d+)\.xmp$', file)
     return int(m.group(1)) if m is not None else 0
 
 
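
`fileSortNumber` also only gains the raw-string prefix. For context, a usage sketch of how such a key function orders the `.BK.<n>.xmp` backup copies; the file names are invented for the example:

```python
import re


def fileSortNumber(file):
    # extract the backup index from names like 'IMG_0001.BK.2.xmp'
    m = re.match(r'.*\.BK\.(\d+)\.xmp$', file)
    return int(m.group(1)) if m is not None else 0


files = ['IMG_0001.BK.2.xmp', 'IMG_0001.BK.10.xmp', 'IMG_0001.xmp']
print(sorted(files, key=fileSortNumber))
# ['IMG_0001.xmp', 'IMG_0001.BK.2.xmp', 'IMG_0001.BK.10.xmp']
```
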
@@ -644,137 +644,178 @@ parser = argparse.ArgumentParser(
 
 # xmp folder (or folders), or file (or files)
 # note that the target directory or file needs to be writeable
-parser.add_argument('-i', '--include-source',
-    required=True,
-    nargs='*',
-    action=writable_dir_folder,
-    dest='xmp_sources',
-    metavar='XMP SOURCE FOLDER',
-    help='The source folder or folders with the XMP files that need reverse geo encoding to be set. Single XMP files can be given here'
-)
+parser.add_argument(
+    '-i',
+    '--include-source',
+    required=True,
+    nargs='*',
+    action=writable_dir_folder,
+    dest='xmp_sources',
+    metavar='XMP SOURCE FOLDER',
+    help='The source folder or folders with the XMP files that need reverse geo encoding to be set. Single XMP files can be given here'
+)
 # exclude folders
-parser.add_argument('-x', '--exclude-source',
-    nargs='*',
-    action=writable_dir_folder,
-    dest='exclude_sources',
-    metavar='EXCLUDE XMP SOURCE FOLDER',
-    help='Folders and files that will be excluded.'
-)
+parser.add_argument(
+    '-x',
+    '--exclude-source',
+    nargs='*',
+    action=writable_dir_folder,
+    dest='exclude_sources',
+    metavar='EXCLUDE XMP SOURCE FOLDER',
+    help='Folders and files that will be excluded.'
+)
 
 # LR database (base folder)
 # get .lrcat file in this folder
-parser.add_argument('-l', '--lightroom',
-    # required=True,
-    action=readable_dir,
-    dest='lightroom_folder',
-    metavar='LIGHTROOM FOLDER',
-    help='Lightroom catalogue base folder'
-)
+parser.add_argument(
+    '-l',
+    '--lightroom',
+    # required=True,
+    action=readable_dir,
+    dest='lightroom_folder',
+    metavar='LIGHTROOM FOLDER',
+    help='Lightroom catalogue base folder'
+)
 
 # strict LR check with base path next to the file base name
-parser.add_argument('-s', '--strict',
-    dest='lightroom_strict',
-    action='store_true',
-    help='Do strict check for Lightroom files including Path in query'
-)
+parser.add_argument(
+    '-s',
+    '--strict',
+    dest='lightroom_strict',
+    action='store_true',
+    help='Do strict check for Lightroom files including Path in query'
+)
 
 # set behaviour override
 # FLAG: default: only set not filled
 # other: overwrite all or overwrite if one is missing, overwrite specifc field (as defined below)
 # fields: Location, City, State, Country, CountryCode
-parser.add_argument('-f', '--field',
-    action='append',
-    type=str.lower, # make it lowercase for check
-    choices=['overwrite', 'location', 'city', 'state', 'country', 'countrycode'],
-    dest='field_controls',
-    metavar='<overwrite, location, city, state, country, countrycode>',
-    help='On default only set fields that are not set yet. Options are: '\
-        'Overwrite (write all new), Location, City, State, Country, CountryCode. '\
-        'Multiple can be given for combination overwrite certain fields only or set only certain fields. '\
-        'If with overwrite the field will be overwritten if already set, else it will be always skipped.'
-)
+parser.add_argument(
+    '-f',
+    '--field',
+    action='append',
+    type=str.lower, # make it lowercase for check
+    choices=['overwrite', 'location', 'city', 'state', 'country', 'countrycode'],
+    dest='field_controls',
+    metavar='<overwrite, location, city, state, country, countrycode>',
+    help='On default only set fields that are not set yet. Options are: '\
+        'Overwrite (write all new), Location, City, State, Country, CountryCode. '\
+        'Multiple can be given for combination overwrite certain fields only or set only certain fields. '\
+        'If with overwrite the field will be overwritten if already set, else it will be always skipped.'
+)
 
-parser.add_argument('-d', '--fuzzy-cache',
-    type=str.lower,
-    action=distance_values,
-    nargs='?',
-    const='10m', # default is 10m
-    dest='fuzzy_distance',
-    metavar='FUZZY DISTANCE',
-    help='Allow fuzzy distance cache lookup. Optional distance can be given, '\
-        'if not set default of 10m is used. '\
-        'Allowed argument is in the format of 12m or 12km'
-)
+parser.add_argument(
+    '-d',
+    '--fuzzy-cache',
+    type=str.lower,
+    action=distance_values,
+    nargs='?',
+    const='10m', # default is 10m
+    dest='fuzzy_distance',
+    metavar='FUZZY DISTANCE',
+    help='Allow fuzzy distance cache lookup. Optional distance can be given, '\
+        'if not set default of 10m is used. '\
+        'Allowed argument is in the format of 12m or 12km'
+)
 
 # Google Maps API key to overcome restrictions
-parser.add_argument('-g', '--google',
-    dest='google_api_key',
-    metavar='GOOGLE API KEY',
-    help='Set a Google API Maps key to overcome the default lookup limitations'
-)
+parser.add_argument(
+    '-g',
+    '--google',
+    dest='google_api_key',
+    metavar='GOOGLE API KEY',
+    help='Set a Google API Maps key to overcome the default lookup limitations'
+)
 
 # use open street maps
-parser.add_argument('-o', '--openstreetmap',
-    dest='use_openstreetmap',
-    action='store_true',
-    help='Use openstreetmap instead of Google'
-)
+parser.add_argument(
+    '-o',
+    '--openstreetmap',
+    dest='use_openstreetmap',
+    action='store_true',
+    help='Use openstreetmap instead of Google'
+)
 
 # email of open street maps requests
-parser.add_argument('-e', '--email',
-    dest='email',
-    metavar='EMIL ADDRESS',
-    help='An email address for OpenStreetMap'
-)
+parser.add_argument(
+    '-e',
+    '--email',
+    dest='email',
+    metavar='EMIL ADDRESS',
+    help='An email address for OpenStreetMap'
+)
 
 # write api/email settings to config file
-parser.add_argument('-w', '--write-settings',
-    dest='config_write',
-    action='store_true',
-    help='Write Google API or OpenStreetMap email to config file'
-)
+parser.add_argument(
+    '-w',
+    '--write-settings',
+    dest='config_write',
+    action='store_true',
+    help='Write Google API or OpenStreetMap email to config file'
+)
 
 # only read data and print on screen, do not write anything
-parser.add_argument('-r', '--read-only',
-    dest='read_only',
-    action='store_true',
-    help='Read current values from the XMP file only, do not read from LR or lookup any data and write back'
-)
+parser.add_argument(
+    '-r',
+    '--read-only',
+    dest='read_only',
+    action='store_true',
+    help='Read current values from the XMP file only, do not read from LR or lookup any data and write back'
+)
 
 # only list unset ones
-parser.add_argument('-u', '--unset-only',
-    dest='unset_only',
-    action='store_true',
-    help='Only list unset XMP files'
-)
+parser.add_argument(
+    '-u',
+    '--unset-only',
+    dest='unset_only',
+    action='store_true',
+    help='Only list unset XMP files'
+)
 
+# only list unset GPS codes
+parser.add_argument(
+    '-p',
+    '--unset-gps-only',
+    dest='unset_gps_only',
+    action='store_true',
+    help='Only list unset XMP files for GPS fields'
+)
+
 # don't try to do auto adjust in list view
-parser.add_argument('-a', '--no-autoadjust',
-    dest='no_autoadjust',
-    action='store_true',
-    help='Don\'t try to auto adjust columns'
-)
+parser.add_argument(
+    '-a',
+    '--no-autoadjust',
+    dest='no_autoadjust',
+    action='store_true',
+    help='Don\'t try to auto adjust columns'
+)
 
 # compact view, compresses columns down to a minimum
-parser.add_argument('-c', '--compact',
-    dest='compact_view',
-    action='store_true',
-    help='Very compact list view'
-)
+parser.add_argument(
+    '-c',
+    '--compact',
+    dest='compact_view',
+    action='store_true',
+    help='Very compact list view'
+)
 
 # Do not create backup files
-parser.add_argument('-n', '--nobackup',
-    dest='no_xmp_backup',
-    action='store_true',
-    help='Do not create a backup from the XMP file'
-)
+parser.add_argument(
+    '-n',
+    '--nobackup',
+    dest='no_xmp_backup',
+    action='store_true',
+    help='Do not create a backup from the XMP file'
+)
 
 # verbose args for more detailed output
-parser.add_argument('-v', '--verbose',
-    action='count',
-    dest='verbose',
-    help='Set verbose output level'
-)
+parser.add_argument(
+    '-v',
+    '--verbose',
+    action='count',
+    dest='verbose',
+    help='Set verbose output level'
+)
 
 # debug flag
 parser.add_argument('--debug', action='store_true', dest='debug', help='Set detailed debug output')
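
The hunk above is almost entirely a mechanical reformat of the `add_argument` calls (one argument per line) plus the new `-p/--unset-gps-only` flag. A stripped-down sketch of the two less obvious option shapes, without the script's custom actions (`writable_dir_folder`, `distance_values`, and so on), showing how `nargs='?'` with `const` and `action='append'` with `choices` behave:

```python
import argparse

parser = argparse.ArgumentParser()
# -d: optional value; a bare "-d" falls back to const, omitting -d gives None
parser.add_argument(
    '-d',
    '--fuzzy-cache',
    type=str.lower,
    nargs='?',
    const='10m',
    dest='fuzzy_distance',
)
# -f: repeatable, lower-cased, restricted to the known field names
parser.add_argument(
    '-f',
    '--field',
    action='append',
    type=str.lower,
    choices=['overwrite', 'location', 'city', 'state', 'country', 'countrycode'],
    dest='field_controls',
)

print(parser.parse_args([]))                                 # both options default to None
print(parser.parse_args(['-d']))                             # fuzzy_distance == '10m' (const)
print(parser.parse_args(['-d', '5KM']))                      # fuzzy_distance == '5km' (lower-cased)
print(parser.parse_args(['-f', 'Overwrite', '-f', 'city']))  # field_controls == ['overwrite', 'city']
```
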
@@ -837,7 +878,7 @@ if args.email and not args.use_openstreetmap:
     error = True
 # if email and not basic valid email (@ .)
 if args.email:
-    if not re.match('^.+@.+\.[A-Za-z]{1,}$', args.email):
+    if not re.match(r'^.+@.+\.[A-Za-z]{1,}$', args.email):
         print("Not a valid email for OpenStreetMap: {}".format(args.email))
         error = True
 # on error exit here
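
Again only the `r''` prefix changes. A quick check of what that loose email pattern accepts; the sample addresses are invented:

```python
import re

email_re = re.compile(r'^.+@.+\.[A-Za-z]{1,}$')

for address in ('user@example.com', 'user@localhost', 'not-an-email'):
    print(address, '->', bool(email_re.match(address)))
# user@example.com -> True, user@localhost -> False, not-an-email -> False
```
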
@@ -980,6 +1021,8 @@ if args.lightroom_folder:
     cur = lrdb.cursor()
     # flag that we have Lightroom DB
     use_lightroom = True
+    if args.debug:
+        print("### USE Lightroom {}".format(use_lightroom))
 
 # on error exit here
 if error:
@@ -1003,8 +1046,8 @@ for xmp_file_source in args.xmp_sources:
             # 2) file is not in exclude list
             # 3) full folder is not in exclude list
             if file.endswith(".xmp") and ".BK." not in file \
                     and "{}/{}".format(root, file) not in args.exclude_sources \
                     and root.rstrip('/') not in [x.rstrip('/') for x in args.exclude_sources]:
                 if "{}/{}".format(root, file) not in work_files:
                     work_files.append("{}/{}".format(root, file))
                     count['all'] += 1
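
This hunk shows no textual change in the collection logic itself; the difference is most likely only the alignment of the continuation lines. For orientation, a simplified sketch of the walk-and-filter it performs, assuming `root` and `file` come from `os.walk` over each source folder (the function name and structure here are illustrative, not the script's):

```python
import os


def collect_xmp_files(xmp_sources, exclude_sources):
    """Gather .xmp files, skipping .BK. backups and excluded paths or folders."""
    work_files = []
    excluded_dirs = [x.rstrip('/') for x in exclude_sources]
    for source in xmp_sources:
        for root, dirs, files in os.walk(source):
            for file in files:
                path = "{}/{}".format(root, file)
                if file.endswith(".xmp") and ".BK." not in file \
                        and path not in exclude_sources \
                        and root.rstrip('/') not in excluded_dirs:
                    if path not in work_files:
                        work_files.append(path)
    return work_files
```
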
@@ -1088,7 +1131,7 @@ if args.read_only:
 
 # ### MAIN WORK LOOP
 # now we just loop through each file and work on them
-for xmp_file in work_files:
+for xmp_file in work_files:  # noqa: C901
     if not args.read_only:
         print("---> {}: ".format(xmp_file), end='')
 
@@ -1102,12 +1145,16 @@ for xmp_file in work_files:
     # read fields from the XMP file and store in hash
     xmp.parse_from_str(strbuffer)
     for xmp_field in xmp_fields:
-        data_set[xmp_field] = xmp.get_property(xmp_fields[xmp_field], xmp_field)
+        # need to check if propert exist or it will the exempi routine will fail
+        if xmp.does_property_exist(xmp_fields[xmp_field], xmp_field):
+            data_set[xmp_field] = xmp.get_property(xmp_fields[xmp_field], xmp_field)
+        else:
+            data_set[xmp_field] = ''
         if args.debug:
             print("### => XMP: {}:{} => {}".format(xmp_fields[xmp_field], xmp_field, data_set[xmp_field]))
     if args.read_only:
         # view only if list all or if data is unset
-        if not args.unset_only or (args.unset_only and '' in data_set.values()):
+        if (not args.unset_only and not args.unset_gps_only) or (args.unset_only and '' in data_set.values()) or (args.unset_gps_only and (not data_set['GPSLatitude'] or not data_set['GPSLongitude'])):
             # for read only we print out the data formatted
             # headline check, do we need to print that
             count['read'] = printHeader(header_line.format(page_no=page_no, page_all=page_all), count['read'], header_repeat)
||||||
@@ -1188,6 +1235,7 @@ for xmp_file in work_files:
|
|||||||
# run this through the overwrite checker to get unset if we have a forced overwrite
|
# run this through the overwrite checker to get unset if we have a forced overwrite
|
||||||
has_unset = False
|
has_unset = False
|
||||||
failed = False
|
failed = False
|
||||||
|
from_cache = False
|
||||||
for loc in data_set_loc:
|
for loc in data_set_loc:
|
||||||
if checkOverwrite(data_set[loc], loc, args.field_controls):
|
if checkOverwrite(data_set[loc], loc, args.field_controls):
|
||||||
has_unset = True
|
has_unset = True
|
||||||
@@ -1226,6 +1274,7 @@ for xmp_file in work_files:
                 maps_location = reverseGeolocate(latitude=data_set['GPSLatitude'], longitude=data_set['GPSLongitude'], map_type=map_type)
                 # cache data with Lat/Long
                 data_cache[cache_key] = maps_location
+                from_cache = False
             else:
                 maps_location = data_cache[best_match_latlong]
                 # cache this one, because the next one will match this one too
@@ -1233,10 +1282,12 @@ for xmp_file in work_files:
                 data_cache[cache_key] = maps_location
                 count['cache'] += 1
                 count['fuzzy_cache'] += 1
+                from_cache = True
         else:
             # load location from cache
             maps_location = data_cache[cache_key]
             count['cache'] += 1
+            from_cache = True
         # overwrite sets (note options check here)
         if args.debug:
             print("### Map Location ({}): {}".format(map_type, maps_location))
@@ -1279,8 +1330,11 @@ for xmp_file in work_files:
             with open(xmp_file, 'w') as fptr:
                 fptr.write(xmp.serialize_to_str(omit_packet_wrapper=True))
         else:
-            print("[TEST] Would write {} ".format(data_set, xmp_file), end='')
-        print("[UPDATED]")
+            print("[TEST] Would write {} {}".format(data_set, xmp_file), end='')
+        if from_cache:
+            print("[UPDATED FROM CACHE]")
+        else:
+            print("[UPDATED]")
         count['changed'] += 1
     elif failed:
         print("[FAILED]")