More sanitization options
parent a602b8064c
commit c53b64203d
1 changed file with 85 additions and 6 deletions

pass2csv.py
@@ -92,7 +92,7 @@ bwFieldSet = OrderedDict({
 header = [key for key, value in bwFieldSet.items()]


-def write(file, entries, get_fields, get_lines):
+def write(file, entries, get_fields, get_lines, notes, ignore, usernames, no_degroup):
     get_field_names = set(x[0] for x in get_fields)
     get_line_names = set(x[0] for x in get_lines)
     field_names = get_field_names | get_line_names

@@ -100,14 +100,30 @@ def write(file, entries, get_fields, get_lines):
     stderr(f"\nWriting data to {file.name}\n")
     csvw.writerow(header)
     for entry in entries:
-        if entry['title'] == 'ChromePasswords':
+        if entry['title'] in ignore:
             continue
         columns = []
         fieldIndex = -1
+        if entry['group'] not in no_degroup:
+            for upat in usernames:
+                m = upat.search(entry['title'])
+                if m:
+                    # username match, clean up the entry
+                    if entry['fields'].get('username') in ['', None]:
+                        entry['fields']['username'] = m[0]
+                    else:
+                        entry['fields']['titleuser'] = m[0]
+                    oldTitle = entry['title']
+                    entry['title'] = upat.sub(str(entry['group']), str(entry['title'])).rstrip(":")
+                    entry['fields']['passEntry'] = '{}/{}'.format(entry['group'], oldTitle)
+                    entry['group'] = ''
         for bwf, pf in bwFieldSet.items():
             if pf is None:
                 columns.append('')
             elif pf == 'type':
-                columns.append('login')
+                if entry['title'] in notes:
+                    columns.append('note')
+                else:
+                    columns.append('login')
             elif pf == '__fields':
                 columns.append('')
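
For clarity: the new degrouping branch rewrites entries whose title is essentially a username. Below is a minimal standalone sketch of that transformation; the entry dict and the --username pattern are invented examples, not values from any real store.

    import re

    # Hypothetical entry, shaped like the ones pass2csv builds while decrypting.
    entry = {
        'group': 'Email',
        'title': 'alice@example.com',
        'fields': {'username': ''},
    }

    # An illustrative --username pattern; any regex given via -u would do.
    upat = re.compile(r'[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+')

    m = upat.search(entry['title'])
    if m:
        # The match fills the empty username field ('titleuser' if one exists).
        if entry['fields'].get('username') in ['', None]:
            entry['fields']['username'] = m[0]
        else:
            entry['fields']['titleuser'] = m[0]
        # The title is rewritten to the group name, the original pass path is
        # kept in 'passEntry', and the group is cleared.
        old_title = entry['title']
        entry['title'] = upat.sub(entry['group'], entry['title']).rstrip(":")
        entry['fields']['passEntry'] = '{}/{}'.format(entry['group'], old_title)
        entry['group'] = ''

    print(entry)
    # {'group': '', 'title': 'Email',
    #  'fields': {'username': 'alice@example.com',
    #             'passEntry': 'Email/alice@example.com'}}
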
@@ -133,7 +149,7 @@ def write(file, entries, get_fields, get_lines):


 def main(store_path, outfile, grouping_base, gpgbinary, use_agent, encodings,
-         exclude, get_fields, get_lines):
+         exclude, get_fields, get_lines, notes, ignore, usernames, no_degroup):
     entries = []
     failures = []
     path = pathlib.Path(store_path)

@@ -181,7 +197,7 @@ def main(store_path, outfile, grouping_base, gpgbinary, use_agent, encodings,
     if not entries:
         stderr("\nNothing to write.")
         sys.exit(1)
-    write(outfile, entries, get_fields, get_lines)
+    write(outfile, entries, get_fields, get_lines, notes, ignore, usernames, no_degroup)


 def parse_args(args=None):
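
The notes and ignore lists forwarded here end up as plain membership tests on each entry's title inside write(): ignored titles are dropped, and titles listed as notes are exported with type 'note' instead of 'login'. A condensed, self-contained illustration of that behaviour (the entry data is invented):

    notes = ['Recovery Codes']
    ignore = ['ChromePasswords']

    entries = [
        {'title': 'ChromePasswords'},   # skipped entirely
        {'title': 'Recovery Codes'},    # exported with type 'note'
        {'title': 'example.com'},       # exported with type 'login'
    ]

    for entry in entries:
        if entry['title'] in ignore:    # same membership test as in write()
            continue
        kind = 'note' if entry['title'] in notes else 'login'
        print(entry['title'], '->', kind)
    # Recovery Codes -> note
    # example.com -> login
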
@@ -286,6 +302,58 @@ def parse_args(args=None):
         version='%(prog)s ' + __version__
     )

+    parser.add_argument(
+        '-n', '--note',
+        metavar='title',
+        action='append',
+        type=str,
+        default=[],
+        help=(
+            "the title of an entry that should be exported as a note; "
+            "can be specified multiple times"
+        ),
+        dest='notes'
+    )
+
+    parser.add_argument(
+        '-i', '--ignore',
+        metavar='title',
+        action='append',
+        type=str,
+        default=[],
+        help=(
+            "the title of an entry that should be skipped; "
+            "can be specified multiple times"
+        ),
+        dest='ignore'
+    )
+
+    parser.add_argument(
+        '-u', '--username',
+        metavar='regex',
+        action='append',
+        type=str,
+        default=[],
+        help=(
+            "a regex matching a username in an entry title; any match is "
+            "replaced with the group name. Can be specified multiple times"
+        ),
+        dest='usernames'
+    )
+
+    parser.add_argument(
+        '-D', '--no-degroup',
+        metavar='group',
+        action='append',
+        type=str,
+        default=[],
+        help=(
+            "a group to exclude from username degrouping; "
+            "can be specified multiple times"
+        ),
+        dest='no_degroup'
+    )
+
     return parser.parse_args(args)

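
All four new options use action='append', so each flag may be repeated and the values accumulate into a list, and the default=[] keeps the later membership tests in write() from ever seeing None. A self-contained sketch that mirrors only these options; the prog name and the parsed values are invented:

    import argparse

    parser = argparse.ArgumentParser(prog='pass2csv-sketch')  # stand-in parser
    parser.add_argument('-n', '--note', metavar='title', action='append',
                        type=str, default=[], dest='notes')
    parser.add_argument('-i', '--ignore', metavar='title', action='append',
                        type=str, default=[], dest='ignore')
    parser.add_argument('-u', '--username', metavar='regex', action='append',
                        type=str, default=[], dest='usernames')
    parser.add_argument('-D', '--no-degroup', metavar='group', action='append',
                        type=str, default=[], dest='no_degroup')

    args = parser.parse_args([
        '-n', 'Recovery Codes', '-n', 'Backup Codes',  # repeated flags accumulate
        '-i', 'ChromePasswords',
        '-u', r'[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+',
        '-D', 'Email',
    ])
    print(args.notes)       # ['Recovery Codes', 'Backup Codes']
    print(args.no_degroup)  # ['Email']
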
@@ -324,6 +392,13 @@ def cli():
             failed = True
         get_lines.append((name, regexp))

+    username_patterns = []
+    for pattern in parsed.usernames:
+        regexp = compile_regexp(pattern)
+        if not regexp:
+            failed = True
+        username_patterns.append(regexp)
+
     if failed:
         sys.exit(1)

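
compile_regexp is not part of this diff; it already exists in pass2csv.py and is used the same way for the --get-line patterns just above. Judging only from that usage (it must return a compiled pattern, or something falsy for a bad regex), a stand-in could look like the sketch below; the real helper's error reporting and regex flags may differ.

    import re
    import sys


    def compile_regexp(pattern):
        # Assumed behaviour: return a compiled pattern, or None (falsy) so the
        # caller can mark the failure and exit after reporting all bad patterns.
        try:
            return re.compile(pattern)
        except re.error as err:
            print(f"Invalid regular expression '{pattern}': {err}", file=sys.stderr)
            return None
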
@@ -346,7 +421,11 @@ def cli():
         'encodings': encodings,
         'exclude': exclude_patterns,
         'get_fields': get_fields,
-        'get_lines': get_lines
+        'get_lines': get_lines,
+        'notes': parsed.notes,
+        'ignore': parsed.ignore,
+        'usernames': username_patterns,
+        'no_degroup': parsed.no_degroup
     }

     main(**kwargs)