This commit is contained in:
Daniel Knüttel 2019-08-14 12:07:39 +02:00
parent cd60bd9c47
commit fcaf6e3803
11 changed files with 476 additions and 476 deletions

View File

@ -5,23 +5,23 @@ import docopt
# docopt usage string: defines the CLI (parsed verbatim by docopt.docopt).
usage = '''
Usage:
    autoimport copy SRC_PATH DST_PATH [options]
    autoimport move SRC_PATH DST_PATH [options]
    autoimport placeholders
    autoimport select SRC_PATH [options]

Options:
    -t <specifer> --path-template=<specifer>  The template for creating the new directory structure
                                              [default: <DateTime.year>/<DateTime.month>/<DateTime.day>]
    -n --no-select-stop-on-error              Do not stop selecting files when an error occurs.
    -w --walk                                 Walk the directory tree when selecting files.
    -p <postfix> --postfix=<postfix>          Comma separated list of postfixes for files to look for when
                                              selecting files [default: JPG,NEF].
    -d --dry-run                              Do not write changes.
    -v --verbose                              Generate more output.
    -D --debug                                Turn on debug messages.
    -i <dbtype> --implementation=<dbtype>     Internal database type (mem|disk) [default: mem]
    -s <db_file> --storage=<db_file>          Use an external database with path db_file [default: ]
'''
@ -32,48 +32,48 @@ from .tmpdb import get_temporary_db
args = docopt.docopt(usage)

# Configure logging verbosity before any real work happens; the second
# basicConfig call is a no-op if --verbose already installed a handler.
if args["--verbose"]:
    logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
if args["--debug"]:
    logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.DEBUG)

logging.debug("ARGUMENTS:")
for key, value in args.items():
    logging.debug("\t{}: \t{}".format(key, value))

try:
    db = get_temporary_db(args["--implementation"])
except Exception as e:
    print(e)
    sys.exit(1)

# Dispatch to the sub-command selected on the command line.
if args["placeholders"]:
    result = placeholders()
elif args["copy"]:
    result = copy(db, args["SRC_PATH"], args["DST_PATH"],
                  args["--path-template"],
                  not args["--no-select-stop-on-error"],
                  args["--walk"], args["--postfix"], args["--dry-run"])
elif args["move"]:
    result = move(db, args["SRC_PATH"], args["DST_PATH"],
                  args["--path-template"],
                  not args["--no-select-stop-on-error"],
                  args["--walk"], args["--postfix"], args["--dry-run"])
elif args["select"]:
    result = select(db, args["SRC_PATH"],
                    not args["--no-select-stop-on-error"],
                    args["--walk"], args["--postfix"], args["--dry-run"])

View File

@ -8,140 +8,140 @@ from .write.files import write_files
def placeholders():
    """Print every supported path-template placeholder, one per line.

    Returns 0 as the process exit code.
    """
    for name in sorted(ph):
        print(name)
    return 0
def select(db, src_path, stop_on_error, walk, postfix, dryrun):
    """Find image files below ``src_path`` and print their EXIF metadata.

    Returns 0 on success, 1 when file selection fails.
    ``dryrun`` is unused here; it is accepted for CLI symmetry with
    copy/move.
    """
    logger = logging.getLogger(__name__)
    extensions = postfix.split(",")
    try:
        findall(src_path, walk, extensions, db, stop_on_error)
    except Exception as e:
        logger.error(e)
        logger.debug(traceback.format_exc())
        return 1

    cursor = db.cursor()
    rows = cursor.execute('''SELECT * FROM FILES''')
    field_names = ("DateTime", "DateTimeDigitized", "DateTimeOriginal",
                   "Model", "Make", "Software")
    for row in rows:
        print(row[0])
        for field, value in zip(field_names, row[1:]):
            print("\t", field, ":", value)
    cursor.execute('''SELECT COUNT(name) FROM FILES''')
    print("found {} files".format(cursor.fetchone()[0]))
    return 0
def copy(db, src_path, dst_path, path_template, stop_on_error, walk, postfix, dryrun):
    """Copy the selected files into the template-derived directory tree.

    Thin wrapper around do_copy_or_move with move=False; returns its
    exit code.
    """
    return do_copy_or_move(db, src_path, dst_path, path_template,
                           stop_on_error, walk, postfix, dryrun, False)
def move(db, src_path, dst_path, path_template, stop_on_error, walk, postfix, dryrun):
    """Move the selected files into the template-derived directory tree.

    Thin wrapper around do_copy_or_move with move=True; returns its
    exit code.
    """
    return do_copy_or_move(db, src_path, dst_path, path_template,
                           stop_on_error, walk, postfix, dryrun, True)
def do_copy_or_move(db, src_path, dst_path, path_template,
                    stop_on_error, walk, postfix, dryrun, move):
    """Select files, order them by ``path_template`` and write them out.

    Pipeline: findall() fills FILES, order() fills DIRECTORIES and
    ASSOCIATIONS, create_paths() makes the target directories and
    write_files() copies (or moves, when ``move`` is set) the files.

    Returns a process exit code: 0 on success, 1 when selection fails,
    2 for a bad path template, 3 when creating paths or writing files
    fails.
    """
    logger = logging.getLogger(__name__)
    extensions = postfix.split(",")
    try:
        findall(src_path, walk, extensions, db, stop_on_error)
    except Exception as e:
        logger.error(e)
        logger.debug(traceback.format_exc())
        return 1

    cursor = db.cursor()
    cursor.execute('''SELECT COUNT(name) FROM FILES''')
    print("found {} files".format(cursor.fetchone()[0]))

    try:
        path_specifier = get_path_specifier(path_template)
    except Exception as e:
        logger.error(str(e))
        logger.debug(traceback.format_exc())
        return 2

    order(db, path_specifier)
    cursor.execute('''SELECT COUNT(rowid) FROM ASSOCIATIONS''')
    print("created {} associations between files and directories".format(cursor.fetchone()[0]))
    cursor.execute('''SELECT COUNT(name) FROM DIRECTORIES''')
    print("will create {} new directories".format(cursor.fetchone()[0]))

    for line in db._db.iterdump():
        # FIX: use the module logger; the original called logging.debug()
        # on the root logger, bypassing per-module configuration.
        logger.debug(line)

    try:
        create_paths(db, dst_path, dryrun)
    except Exception as e:
        logger.error(str(e))
        logger.debug(traceback.format_exc())
        return 3
    try:
        write_files(db, dst_path, src_path, move, dryrun)
    except Exception as e:
        logger.error(str(e))
        logger.debug(traceback.format_exc())
        return 3
    print("done")
    # FIX: explicit success code (the original fell off the end and
    # returned None).
    return 0

View File

@ -1,5 +1,5 @@
import datetime import datetime
def get_datetime(time_str):
    """Parse an EXIF timestamp ("YYYY:MM:DD HH:MM:SS") into a datetime."""
    return datetime.datetime.strptime(time_str, "%Y:%m:%d %H:%M:%S")

View File

@ -4,77 +4,77 @@ from .date_and_time import get_datetime
def order(db, path_specifier):
    """Associate every FILES row with a target directory.

    For each selected file the placeholder values are computed from its
    EXIF metadata, the target path is built from ``path_specifier`` and
    one (file_id, directory_id) row is inserted into ASSOCIATIONS
    (creating DIRECTORIES rows on demand via get_path_id).
    """
    logger = logging.getLogger(__name__)
    cursor = db.cursor()
    cursor.execute(
        '''SELECT rowid,
                name,
                DateTime,
                DateTimeDigitized,
                DateTimeOriginal,
                Model,
                Make,
                Software
        FROM FILES'''
    )
    for (rowid, name, DateTime, DateTimeDigitized, DateTimeOriginal,
            Model, Make, Software) in cursor.fetchall():
        timestamps = {
            "DateTime": get_datetime(DateTime),
            "DateTimeDigitized": get_datetime(DateTimeDigitized),
            "DateTimeOriginal": get_datetime(DateTimeOriginal),
        }
        data = {
            "<name>": name,
            "<Model>": Model,
            "<Make>": Make,
            "<Software>": Software,
        }
        for prefix, stamp in timestamps.items():
            # The year stays an int; every other field is a zero-padded
            # two-digit string, matching the original layout.
            data["<{}.year>".format(prefix)] = stamp.year
            for field in ("day", "month", "hour", "minute", "second"):
                data["<{}.{}>".format(prefix, field)] = str(getattr(stamp, field)).zfill(2)

        this_path = [str(data[p]) if p in data else p for p in path_specifier]
        logger.debug(this_path)
        this_path = os.path.join(*this_path)
        path_id = get_path_id(db, this_path)
        cursor.execute("INSERT INTO ASSOCIATIONS(file_id, directory_id) VALUES(?, ?)",
                       (rowid, path_id))
def get_path_id(db, path):
    """Return the DIRECTORIES rowid for ``path``, inserting it if absent."""
    cursor = db.cursor()
    cursor.execute("SELECT rowid FROM DIRECTORIES WHERE name=?", (path,))
    row = cursor.fetchone()
    if row is not None:
        return row[0]
    cursor.execute("INSERT INTO DIRECTORIES(name) VALUES(?)", (path,))
    return cursor.lastrowid

View File

@ -8,18 +8,18 @@ The placeholders are marked by chevrons: ``<placeholder>``
An example path specifier might look like this::

    <DateTime.year>/<DateTime.month>/<DateTime.day>/images/<Make>/<Model>

The resulting internal specifier will be::

    [
        "<DateTime.year>"
        , "<DateTime.month>"
        , "<DateTime.day>"
        , "images"
        , "<Make>"
        , "<Model>"
    ]
It will also check whether the placeholders are actually valid. It will also check whether the placeholders are actually valid.
""" """
@ -28,35 +28,35 @@ import os
# All placeholders a path template may contain: the file name, the three
# EXIF timestamps broken into their fields, and the camera identity tags.
placeholders = {"<name>", "<Model>", "<Make>", "<Software>"} | {
    "<{}.{}>".format(prefix, field)
    for prefix in ("DateTime", "DateTimeDigitized", "DateTimeOriginal")
    for field in ("day", "month", "year", "hour", "minute", "second")
}
def get_path_specifier(string_path_specifer):
    """Split a path template into its components and validate them.

    Raises ValueError for any chevron-delimited component that is not a
    known placeholder; plain components are passed through untouched.
    """
    components = string_path_specifer.split(os.path.sep)
    for component in components:
        looks_like_placeholder = (component.startswith("<")
                                  and component.endswith(">"))
        if looks_like_placeholder and component not in placeholders:
            raise ValueError("unknown placeholder: {}".format(component))
    return components

View File

@ -5,62 +5,62 @@ import exifread
def extract_metadata_from_file(filename):
    """Read the EXIF tags autoimport needs from ``filename``.

    Returns a dict with the keys DateTime, DateTimeDigitized,
    DateTimeOriginal, Model, Make and Software.  Raises KeyError when
    one of them is missing, and re-raises any open/decode error after
    logging it.
    """
    logger = logging.getLogger(__name__)
    logger.info("handling: {}".format(filename))
    try:
        img = Image.open(filename)
    except Exception as e:
        # FIX: the original called img.close() here, but ``img`` is
        # unbound when Image.open() fails, so a NameError masked the
        # real error.
        logger.error("failed to open and load '{}'".format(filename))
        raise e
    if hasattr(img, "_getexif"):
        try:
            exif = {ExifTags.TAGS[k]: v for k, v in img._getexif().items() if k in ExifTags.TAGS}
        except Exception as e:
            logger.error("failed to read EXIF data from '{}'".format(filename))
            raise e
        finally:
            img.close()
    else:
        img.close()
        # We cannot use PIL because PIL is crap. So we use
        # exifread. This is a little slower but will produce
        # results more safely.
        exif = get_exif_with_exifread(filename)

    values_no_preprocessing = {"DateTime", "DateTimeDigitized", "DateTimeOriginal",
                               "Model", "Make", "Software"}
    for k in values_no_preprocessing:
        if k not in exif:
            logger.error("missing EXIF value {} in '{}'".format(k, filename))
            raise KeyError("missing EXIF value {}".format(k))
    return {k: exif[k] for k in values_no_preprocessing}
def get_exif_with_exifread(filename):
    """Read EXIF tags with exifread and strip the section prefixes.

    Tags from the "EXIF " section are returned under their bare names.
    DateTime, Make, Software and Model live in the "Image " section and
    are pulled from there in addition.
    """
    with open(filename, "rb") as image:
        tags = exifread.process_file(image)

    exif_prefix = "EXIF "
    prefix_length = len(exif_prefix)
    data = {name[prefix_length:]: tag.values
            for name, tag in tags.items()
            if name.startswith(exif_prefix)}
    # Not all the tags we want are in the EXIF section.
    image_prefix = "Image "
    for key in {"DateTime", "Make", "Software", "Model"}:
        data[key] = tags[image_prefix + key].values
    return data

View File

@ -6,49 +6,49 @@ module_logger = logging.getLogger(__name__)
from .metadata import extract_metadata_from_file from .metadata import extract_metadata_from_file
def findall_this_directory(directory, files, extensions, db, stop_on_error):
    """Insert metadata for every file in ``files`` with a wanted extension."""
    for name in files:
        module_logger.debug("handling file: {}".format(name))
        if name.split(".")[-1] not in extensions:
            continue
        insert_file_into_db(os.path.join(directory, name), db, stop_on_error)
def insert_file_into_db(filename, db, stop_on_error):
    """Extract EXIF metadata from ``filename`` and store a FILES row.

    On extraction errors: re-raise when ``stop_on_error`` is set,
    otherwise log and skip the file.
    """
    try:
        metadata = extract_metadata_from_file(filename)
    except Exception as e:
        if stop_on_error:
            module_logger.error(
                "an error occured, the program execution ends now, set ``--no-select-stop-on-error`` to continue anyways")
            module_logger.error("file was: {}".format(filename))
            raise e
        module_logger.error("ignoring error")
        return

    columns = ["DateTime", "DateTimeDigitized", "DateTimeOriginal",
               "Model", "Make", "Software"]
    row = [filename] + [metadata[c] for c in columns]
    db.cursor().execute('''INSERT INTO FILES(name,
                DateTime,
                DateTimeDigitized,
                DateTimeOriginal,
                Model,
                Make,
                Software)
            VALUES(?, ?, ?, ?, ?, ?, ?)''', row)
def findall(directory, walk, extensions, db, stop_on_error):
    """Select files below ``directory`` into the database.

    With ``walk`` unset only the top-level directory is scanned.
    """
    for current_dir, _subdirs, files in os.walk(directory):
        findall_this_directory(current_dir, files, extensions, db, stop_on_error)
        if not walk:
            break

View File

@ -15,98 +15,98 @@ import tempfile
import abc import abc
def _open_db_mem(): def _open_db_mem():
return (sqlite3.connect(":memory:"), None) return (sqlite3.connect(":memory:"), None)
def _open_db_disk(): def _open_db_disk():
file = tempfile.NamedTemporaryFile() file = tempfile.NamedTemporaryFile()
db = sqlite3.connect(file.name) db = sqlite3.connect(file.name)
return (db, file) return (db, file)
class AbstractTemporaryDatabase(abc.ABC):
    """
    Abstract base class for all ``TemporaryDatabase`` implementations.

    **Note**: ``__init__`` must set ``self._db`` to an open sqlite3
    connection.
    """
    def __init__(self):
        super().__init__()
        # Concrete subclasses replace this with an open connection.
        self._db = None

    @abc.abstractmethod
    def close(self):
        """Release the underlying connection (and backing file, if any)."""
        pass

    def cursor(self):
        """Return a fresh cursor on the underlying connection."""
        return self._db.cursor()

    def dump_db(self, file):
        """Write an SQL dump of the whole database to ``file``."""
        file.writelines("{}\n".format(statement)
                        for statement in self._db.iterdump())
class MemoryTemporaryDatabase(AbstractTemporaryDatabase):
    """TemporaryDatabase backed by an in-memory sqlite3 database."""
    def __init__(self):
        AbstractTemporaryDatabase.__init__(self)
        self._db, _ = _open_db_mem()

    def close(self):
        """Close the in-memory connection; its contents are discarded."""
        self._db.close()
class DiskTemporaryDatabase(AbstractTemporaryDatabase):
    """TemporaryDatabase backed by a temporary file on disk."""
    def __init__(self):
        AbstractTemporaryDatabase.__init__(self)
        self._db, self._file = _open_db_disk()

    def close(self):
        """Close the connection, then the backing temporary file."""
        self._db.close()
        self._file.close()
def get_temporary_db(type_):
    """
    Return an open ``TemporaryDatabase`` with already set up tables.

    ``type_`` is either ``"mem"`` for the in-memory implementation or
    ``"disk"`` for the on-disk implementation; any other value raises
    ValueError.
    """
    implementations = {"mem": MemoryTemporaryDatabase,
                       "disk": DiskTemporaryDatabase}
    if type_ not in implementations:
        # FIX: typo in the error message (was "unsuppored").
        raise ValueError("unsupported implementation: {}".format(type_))
    instance = implementations[type_]()
    cursor = instance.cursor()
    # One row per selected image plus the EXIF values autoimport uses.
    cursor.execute(
        '''CREATE TABLE FILES(
            name TEXT,
            DateTime TEXT,
            DateTimeDigitized TEXT,
            DateTimeOriginal TEXT,
            Model TEXT,
            Make TEXT,
            Software TEXT)'''
    )
    cursor.execute(
        '''CREATE TABLE DIRECTORIES(
            name TEXT)'''
    )
    # n:1 mapping from FILES rows to DIRECTORIES rows.
    cursor.execute(
        '''CREATE TABLE ASSOCIATIONS(file_id INTEGER,
                directory_id INTEGER)'''
    )
    cursor.execute(
        '''CREATE TABLE KV(key TEXT,
                value TEXT)'''
    )
    return instance

View File

@ -9,39 +9,39 @@ import shutil
module_logger = logging.getLogger(__name__) module_logger = logging.getLogger(__name__)
def write_files(db, output_basepath, input_basepath, move, dry_run):
    """
    Write the changes in the file structure on the disk.

    Copies every selected file into its associated directory below
    ``output_basepath`` (preserving file metadata); with ``move`` set
    the source file is removed afterwards, with ``dry_run`` set the
    actions are only logged at warning level.
    """
    cursor = db.cursor()
    associations = cursor.execute(
        '''SELECT DIRECTORIES.name AS pathname,
                FILES.name AS filename
        FROM FILES JOIN ASSOCIATIONS ON FILES.rowid=ASSOCIATIONS.file_id
        JOIN DIRECTORIES ON DIRECTORIES.rowid=ASSOCIATIONS.directory_id
        '''
    )
    for pathname, filename in associations:
        src_name = filename
        dst_name = os.path.join(output_basepath, pathname,
                                os.path.basename(filename))
        if dry_run:
            module_logger.warning("COPY {} -> {}".format(src_name, dst_name))
            if move:
                module_logger.warning("RM {}".format(src_name))
            continue
        module_logger.info("COPY {} -> {}".format(src_name, dst_name))
        shutil.copyfile(src_name, dst_name)
        shutil.copystat(src_name, dst_name)
        if move:
            module_logger.info("RM {}".format(src_name))
            os.remove(src_name)

View File

@ -8,25 +8,25 @@ import logging
module_logger = logging.getLogger(__name__) module_logger = logging.getLogger(__name__)
def create_paths(db, base_path, dry_run):
    """Create every directory listed in DIRECTORIES below ``base_path``.

    With ``dry_run`` set the required actions are only logged.  Errors
    while creating a directory are logged and re-raised.
    """
    cursor = db.cursor()
    result = cursor.execute(
        '''SELECT name FROM DIRECTORIES'''
    )
    for (pathname,) in result:
        real_path_name = os.path.join(base_path, pathname)
        if dry_run:
            if os.path.exists(real_path_name):
                module_logger.info("EXISTS: {}".format(real_path_name))
            else:
                # FIX: Logger.warn() is a deprecated alias of warning().
                module_logger.warning("CREATE {}".format(real_path_name))
            continue
        try:
            if os.path.exists(real_path_name):
                module_logger.info("EXISTS: {}".format(real_path_name))
            else:
                module_logger.info("CREATE {}".format(real_path_name))
                os.makedirs(real_path_name)
        except Exception as e:
            module_logger.error("failed to create directory {}".format(real_path_name))
            raise e

View File

@ -1,15 +1,15 @@
from setuptools import setup, find_packages

setup(
    name = "autoimport",
    version = "0.0.1",
    packages = find_packages(),
    author = "Daniel Knüttel",
    author_email = "daniel.knuettel@daknuett.eu",
    install_requires = ["docopt", "exifread"],
    description = "A script to find, order and copy images",
    long_description = open("README.rst").read(),
    # NOTE(review): "autoimport = licor" is not a valid console_scripts
    # entry point -- the spec requires "name = package.module:callable",
    # and "licor" looks copied from another project.  Confirm and point
    # this at this package's CLI entry function.
    entry_points = {"console_scripts": ["autoimport = licor"]}
)