Example import adapter for TVB
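
This gist has two parts: new_importer.py, which defines the FooDataImporter uploader class, and a console launcher script that registers the importer in the TVB database and fires it on a demo file.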
new_importer.py:

# author: Lia Domide

import numpy

from tvb.adapters.uploaders.abcuploader import ABCUploader
from tvb.basic.logger.builder import get_logger
from tvb.datatypes.time_series import TimeSeries


class FooDataImporter(ABCUploader):
    """Example uploader: reads an array from a plain-text file and stores it as a TimeSeries."""

    _ui_name = "Foo Data"
    _ui_subsection = "foo_data_importer"
    _ui_description = "Foo data import"

    logger = get_logger(__name__)

    def get_upload_input_tree(self):
        ## Describe the upload form: a single file field.
        ## (numpy.loadtxt in launch() reads plain text, so require .txt rather than .npy)
        return [{'name': 'array_data',
                 'type': 'upload',
                 # 'type': "array", "quantifier": "manual",
                 'required_type': '.txt',
                 'label': 'Please upload a plain-text array file',
                 'required': 'true'}]

    def get_output(self):
        return [TimeSeries]

    def launch(self, array_data):
        array_data = numpy.loadtxt(array_data)

        ts = TimeSeries()
        ts.storage_path = self.storage_path
        # ts.configure()
        ts.write_data_slice(array_data)
        ts.close_file()
        return ts
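
The launcher below passes the importer a plain-text file named demo_array.txt. A minimal sketch for generating such a file (the shape and values are arbitrary, chosen only for illustration):

# Create the demo input consumed by the launcher script below.
# numpy.savetxt writes plain text, matching the numpy.loadtxt call
# in FooDataImporter.launch; shape and values here are arbitrary.
import numpy
numpy.savetxt("demo_array.txt", numpy.random.rand(100, 4))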
Launcher script:

# author: Lia Domide

## Select the profile with storage enabled, but without the web interface:
from tvb.basic.profile import TvbProfile as tvb_profile
tvb_profile.set_profile(["-profile", "CONSOLE_PROFILE"], try_reload=False)

from tvb.core.traits import db_events
from tvb.core.entities.model import AlgorithmGroup, Algorithm
from tvb.core.entities.storage import dao
from tvb.core.services.flow_service import FlowService
from tvb.core.services.operation_service import OperationService

from new_importer import FooDataImporter

## Before running this, the TVB web interface needs to have been launched at
## least once, so that a default project, user, etc. exist in the database.

if __name__ == "__main__":

    ## Hook DB events (like preparing JSON attributes on traited DataTypes):
    db_events.attach_db_events()

    flow_service = FlowService()
    operation_service = OperationService()

    ## This project ID needs to exist in the DB; it can be taken from the web interface:
    project = dao.get_project_by_id(1)

    ## This is our newly added importer:
    adapter_instance = FooDataImporter()

    ## We also need to store a reference to the new algorithm in the DB.
    # First, select the category of uploaders:
    upload_category = dao.get_uploader_categories()[0]

    # Check whether the algorithm has already been added to the DB:
    my_group = dao.find_group(FooDataImporter.__module__, FooDataImporter.__name__)
    if my_group is None:
        # Not stored in the DB previously, so store it now:
        my_group = AlgorithmGroup(FooDataImporter.__module__, FooDataImporter.__name__, upload_category.id)
        my_group = dao.store_entity(my_group)
        dao.store_entity(Algorithm(my_group.id, "", "FooName"))
    adapter_instance.algorithm_group = my_group

    ## Prepare the launch arguments as if they were coming from a web UI submit:
    # launch_args = {"array_data": "[1, 2, 3, 4, 5]"}
    launch_args = {"array_data": "demo_array.txt"}

    ## Launch an operation and have the results stored both in the DB and on disk:
    launched_operations = flow_service.fire_operation(adapter_instance,
                                                      project.administrator,
                                                      project.id,
                                                      **launch_args)
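    ## Optional sanity check (an assumption: fire_operation returns the stored
    ## Operation entities, as the variable name suggests); print what came back:
    for operation in launched_operations:
        print(operation)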