Skip to content

Commit

Permalink
black spm_parser
Browse files Browse the repository at this point in the history
  • Loading branch information
remiadon committed Feb 3, 2021
1 parent b6584cd commit c2a4c29
Show file tree
Hide file tree
Showing 3 changed files with 38 additions and 35 deletions.
6 changes: 6 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
repos:
- repo: https://github.com/python/black
rev: stable
hooks:
- id: black
language_version: python3.7
65 changes: 30 additions & 35 deletions bids_prov/spm_parser.py
Original file line number Diff line number Diff line change
@@ -1,24 +1,20 @@
import os
import sys
import click
import json
import re

from collections import defaultdict
import pyld as ld
import warnings

dir_path = os.path.dirname(os.path.realpath(__file__))

import random
import string

def get_id():
    """Return a random 10-character identifier built from ASCII letters.

    Note: uses the ``random`` module, so it is NOT suitable for
    security-sensitive identifiers (use ``secrets`` for that).
    """
    # Relies on the module-level `random` and `string` imports.
    return "".join(random.choice(string.ascii_letters) for _ in range(10))


def realines(filename):
    """Yield each line of *filename* that starts with ``matlabbatch``.

    The trailing newline character is stripped from every yielded line.
    (The name is presumably a typo for ``readlines``; it is kept
    unchanged for backward compatibility with existing callers.)

    Parameters
    ----------
    filename : str
        Path to an SPM batch script (.m file).
    """
    with open(filename) as fd:
        for line in fd:
            if line.startswith("matlabbatch"):
                yield line[:-1]  # drop the trailing "\n"


Expand All @@ -27,56 +23,55 @@ def group_lines(lines):
key = lambda line: re.finditer(r"\{\d+\}", line)
for line in lines:
a = next(re.finditer(r"\{\d+\}", line), None)
b = line.split('.')[2]
b = line.split(".")[2]
if a and b:
g = a.group()
k = (b, g)
res[k].append(line[len(f"matlabbatch{g}."):])
res[k].append(line[len(f"matlabbatch{g}.") :])

return dict(res)


def get_records(task_groups, records=None):
    """Build PROV activity and entity records from grouped SPM batch lines.

    Parameters
    ----------
    task_groups : dict
        Mapping of ``(task_name, batch_index)`` tuples to lists of
        ``"lhs = rhs"`` parameter lines, as produced by ``group_lines``.
    records : defaultdict(list), optional
        Existing records mapping to extend in place.  A fresh one is
        created when omitted.  (This replaces the previous mutable
        default argument ``records=defaultdict(list)``, which was shared
        across calls and silently accumulated records between
        invocations.)

    Returns
    -------
    defaultdict(list)
        The ``records`` mapping with ``"prov:Activity"`` and
        ``"prov:Entity"`` lists appended to.
    """
    if records is None:
        records = defaultdict(list)
    entities_ids = set()  # guards against emitting the same entity twice
    for key, values in task_groups.items():
        activity_name = "".join(key)  # e.g. ("spatial", "{1}") -> "spatial{1}"
        activity_id = "niiri:" + activity_name + get_id()
        activity = {
            "@id": activity_id,
            "label": activity_name,
        }
        # TODO : add time to activity
        entities = []
        for v in values:
            entity_split = v.split(" = ")
            if len(entity_split) == 2:  # keep only well-formed "lhs = rhs" lines
                left, right = entity_split
                # Entity label is the file stem of the left-hand-side path.
                entity_label = left.split("/")[-1].split(".")[0]
                entity = {
                    "@id": "niiri:" + entity_label + get_id(),
                    "label": entity_label,
                    # Strips the surrounding characters of the MATLAB value
                    # -- assumes the "{'...'};" form; TODO confirm.
                    "prov:atLocation": right[2:-3],
                    "wasGeneratedBy": activity_id,
                }
                entities.append(entity)
        activity["used"] = [e["@id"] for e in entities]
        records["prov:Activity"].append(activity)
        for e in entities:
            if e["@id"] not in entities_ids:
                records["prov:Entity"].append(e)
                entities_ids.add(e["@id"])
    return records



@click.command()
@click.argument('filenames', nargs=-1)
@click.option('--output-file', '-o', required=True)
@click.argument("filenames", nargs=-1)
@click.option("--output-file", "-o", required=True)
def spm_to_bids_prov(filenames, output_file):
filename = filenames[0] # FIXME

Expand All @@ -91,30 +86,30 @@ def spm_to_bids_prov(filenames, output_file):
"wasAssociatedWith": {
"@id": "NIH",
"@type": "Organization",
"hadRole": "Funding"
}
"hadRole": "Funding",
},
},
"records": {
"prov:Agent": [{
"@id": "RRID:SCR_007037", # TODO query for version
"@type": "prov:SoftwareAgent",
"label": "SPM"
}],
"prov:Agent": [
{
"@id": "RRID:SCR_007037", # TODO query for version
"@type": "prov:SoftwareAgent",
"label": "SPM",
}
],
"prov:Activity": [],
"prov:Entity": [],
}
},
}

lines = realines(filename)
tasks = group_lines(lines)
records = get_records(tasks)
graph["records"].update(records)

with open(output_file, 'w') as fd:
with open(output_file, "w") as fd:
json.dump(graph, fd, indent=2)




# Script entry point: run the click command and propagate its exit status.
if __name__ == "__main__":
    sys.exit(spm_to_bids_prov())
2 changes: 2 additions & 0 deletions dev-requirements.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
black
pre-commit

0 comments on commit c2a4c29

Please sign in to comment.