#!/usr/bin/env python3
import argparse
import csv
import logging
import pathlib
import re
import sys
import gnupg
logging.basicConfig(level=logging.INFO)
def set_meta(entry, path, grouping_base):
    """Derive the entry's group and title from its file path.

    The group is the path's parent directory relative to
    *grouping_base*; entries directly under the base get an empty
    string instead of a '.' path.  The title is the filename without
    its extension.
    """
    pure = pathlib.PurePath(path)
    parent = pure.relative_to(grouping_base).parent
    entry['group'] = '' if parent.name == '' else parent
    entry['title'] = pure.stem
def set_data(entry, data, exclude, get_fields, get_lines):
    """Split decrypted *data* into password, extracted fields and notes.

    The first line of *data* is the password.  The remaining lines are
    dropped if they match any compiled *exclude* pattern, then scanned
    against the *get_fields* (name, pattern) pairs -- the matched part of
    the line is removed and the remainder, stripped, becomes the field
    value (first match per name wins) -- and the *get_lines* pairs, for
    which every matching line is kept verbatim and joined with newlines.
    All lines not consumed by either mechanism become the notes.
    """
    lines = data.splitlines()
    entry['password'] = lines[0]
    tail = lines[1:]

    # Drop excluded lines before any field/line extraction.
    filtered_tail = []
    for line in tail:
        for exclude_pattern in exclude:
            if exclude_pattern.search(line):
                break
        else:
            filtered_tail.append(line)

    matching_indices = set()
    fields = entry.setdefault('fields', {})
    for i, line in enumerate(filtered_tail):
        for name, pattern in get_fields:
            if name in fields:
                # multiple patterns with same name, we've already found a match
                continue
            match = pattern.search(line)
            if not match:
                continue
            # Keep everything around the match as the field value.
            inverse_match = line[0:match.start()] + line[match.end():]
            fields[name] = inverse_match.strip()
            matching_indices.add(i)
            break

    # Collect whole matching lines, grouped by field name.
    matching_lines = {}
    for i, line in enumerate(filtered_tail):
        for name, pattern in get_lines:
            match = pattern.search(line)
            if not match:
                continue
            matching_lines.setdefault(name, []).append(line)
            matching_indices.add(i)
            break
    for name, matches in matching_lines.items():
        # BUG FIX: join the collected lines, not the dict (which would
        # join its keys and reduce every such field to its own name).
        fields[name] = '\n'.join(matches)

    # Whatever was not consumed above goes into the notes.
    final_tail = [
        line for i, line in enumerate(filtered_tail)
        if i not in matching_indices
    ]
    entry['notes'] = '\n'.join(final_tail).strip()
def write(file, entries, get_fields, get_lines):
    """Write *entries* as CSV rows to the open text *file*.

    Columns are: group, title, password, one column per extracted field
    name (from both get_fields and get_lines), then notes.  Fields an
    entry lacks are written as empty cells.
    """
    # The original used a set union here, which makes the column order
    # vary between runs (string hashing is randomized).  Preserve the
    # first-seen order of the names instead, deduplicating with a dict.
    field_names = list(dict.fromkeys(
        name for name, _ in [*get_fields, *get_lines]
    ))
    header = ["Group(/)", "Title", "Password", *field_names, "Notes"]
    csvw = csv.writer(file)
    logging.info("Writing data to %s", file.name)
    csvw.writerow(header)
    for entry in entries:
        fields = [entry['fields'].get(name) for name in field_names]
        columns = [
            entry['group'], entry['title'], entry['password'],
            *fields,
            entry['notes']
        ]
        csvw.writerow(columns)
def main(store_path, grouping_base, outfile, gpgbinary, use_agent, encoding,
         exclude, get_fields, get_lines):
    """Decrypt every .gpg file under *store_path* and export to CSV.

    Files that fail to decrypt are logged and skipped; all failures are
    repeated as warnings at the end so they are easy to spot.
    """
    store = pathlib.Path(store_path)
    group_root = pathlib.Path(grouping_base)
    gpg = gnupg.GPG(gpgbinary=gpgbinary, use_agent=use_agent)
    gpg.encoding = encoding

    entries = []
    failures = []
    for gpg_file in store.glob('**/*.gpg'):
        logging.info("Processing %s", gpg_file)
        with open(gpg_file, 'rb') as fp:
            decrypted = gpg.decrypt_file(fp)
        if not decrypted.ok:
            logging.error("Could not decrypt %s: %s", gpg_file, decrypted.status)
            failures.append((gpg_file, decrypted.status))
            continue
        entry = {}
        set_meta(entry, gpg_file, group_root)
        set_data(entry, str(decrypted), exclude, get_fields, get_lines)
        entries.append(entry)

    # Re-list failures at the end so they aren't buried in the log.
    for failed_file, status in failures:
        logging.warning("Could not decrypt %s: %s", failed_file, status)
    write(outfile, entries, get_fields, get_lines)
def parse_args(args):
    """Parse the command-line argument list *args* and return the namespace.

    Defines the store path positional plus options for grouping base,
    gpg binary, agent use, output encoding, output file, exclusion
    patterns, and field/line extraction patterns.
    """
    class ExtendAction(argparse.Action):
        # Python 3.8 has 'extend' built in.
        def __call__(self, parser, namespace, values, option_string=None):
            items = getattr(namespace, self.dest) or []
            items.extend(values)
            setattr(namespace, self.dest, items)
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'store_path',
        type=str,
        help="path to the password-store to export",
    )
    parser.add_argument(
        '-b', '--base',
        metavar='path',
        type=str,
        help="path to use as base for grouping passwords",
        dest='base_path'
    )
    parser.add_argument(
        '-g', '--gpg',
        metavar='executable',
        type=str,
        default="gpg",
        help="path to the gpg binary you wish to use (default 'gpg')",
        dest='gpgbinary'
    )
    parser.add_argument(
        '-a', '--use-agent',
        action='store_true',
        default=False,
        help="ask gpg to use its auth agent",
        dest='use_agent'
    )
    parser.add_argument(
        '--encoding',
        metavar='encoding',
        type=str,
        default="utf-8",
        help="text encoding to use when reading gpg output (default 'utf-8')",
        dest='encoding'
    )
    parser.add_argument(
        '-o', '--outfile',
        metavar='file',
        type=argparse.FileType('w'),
        default="-",
        # BUG FIX: this is an output stream; the help previously said
        # "default stdin" although '-' opens stdout for writing.
        help="file to write exported data to (default stdout)",
        dest='outfile'
    )
    parser.add_argument(
        '-e', '--exclude',
        metavar='pattern',
        action=ExtendAction,
        nargs='+',
        type=str,
        default=[],
        help="regexps for lines which should not be exported",
        dest='exclude'
    )
    parser.add_argument(
        '-f', '--get-field',
        metavar=('name', 'pattern'),
        action='append',
        nargs=2,
        type=str,
        default=[],
        help=(
            "a name and a regexp, the part of the line matching the regexp"
            " will be removed and the remaining line will be added to a field"
            " with the chosen name. only one match per password, matching"
            " stops after the first match"
        ),
        dest='get_fields'
    )
    parser.add_argument(
        '-l', '--get-line',
        metavar=('name', 'pattern'),
        action='append',
        nargs=2,
        type=str,
        default=[],
        help=(
            "a name and a regexp for which all lines that match are included"
            " in a field with the chosen name"
        ),
        dest='get_lines'
    )
    return parser.parse_args(args)
def compile_regexp(pattern):
    """Compile *pattern* case-insensitively.

    Returns the compiled regexp, or None (after logging an error with
    the offending pattern and position) if the pattern is invalid.
    """
    try:
        return re.compile(pattern, re.I)
    except re.error as e:
        logging.error(
            "Could not compile pattern '%s', %s at position %s",
            pattern.replace("'", "\\'"), e.msg, e.pos
        )
        return None
if __name__ == '__main__':
    parsed = parse_args(sys.argv[1:])

    # Compile every user-supplied pattern before exiting, so all bad
    # patterns are reported in one run rather than one at a time.
    failed = False

    exclude_patterns = []
    for pattern in parsed.exclude:
        compiled = compile_regexp(pattern)
        failed = failed or compiled is None
        exclude_patterns.append(compiled)

    get_fields = []
    for name, pattern in parsed.get_fields:
        compiled = compile_regexp(pattern)
        failed = failed or compiled is None
        get_fields.append((name, compiled))

    get_lines = []
    for name, pattern in parsed.get_lines:
        compiled = compile_regexp(pattern)
        failed = failed or compiled is None
        get_lines.append((name, compiled))

    if failed:
        sys.exit(1)

    main(
        store_path=parsed.store_path,
        # Group relative to the explicit base if given, else the store root.
        grouping_base=parsed.base_path if parsed.base_path else parsed.store_path,
        gpgbinary=parsed.gpgbinary,
        use_agent=parsed.use_agent,
        encoding=parsed.encoding,
        outfile=parsed.outfile,
        exclude=exclude_patterns,
        get_fields=get_fields,
        get_lines=get_lines,
    )