#!/usr/bin/env python
# coding: utf-8

# # Convert ns-ALEX HT3 files to Photon-HDF5
#
# ## Summary
# This executable document (called a [Jupyter notebook](http://ipython.org/notebook.html))
# will guide you through the conversion of a ns-ALEX data file from **HT3**
# to [Photon-HDF5](http://photon-hdf5.org) format.
#
# If it's the first time you are using a Jupyter notebook, please click on
# *Help* -> *User Interface Tour* for a quick tour of the interface.
#
# In this notebook there are text cells, like this paragraph, and code cells
# containing the code to be executed. To execute a selected code cell hit
# SHIFT+ENTER. To edit a code cell it must be selected and have a green
# frame around it.
#
# If you are reading this notebook online, please refer to
# [this quick-start guide](http://jupyter-notebook-beginner-guide.readthedocs.org)
# for instructions on how to install the required software and run a
# Jupyter notebook on your machine.
#
# ## Prepare the data files
# You can run this notebook using example data files available
# [on figshare](http://dx.doi.org/10.6084/m9.figshare.1455963). If you use
# these example files, please unzip them and put them in a folder named
# "data" (lower case) inside the folder containing this notebook.
#
# Alternatively, you can use your own HT3 file. In this case you need to
# paste the full path of your HT3 file in the following cell, replacing the
# value between single quotes `'`.

# In[ ]:

# Path of the input HT3 file to be converted.
filename = r'data/Pre.ht3'

# The next cell will check if the `filename` location is correct:

# In[ ]:

import os

# EAFP check: attempt to open the file rather than testing for existence,
# so the user gets immediate feedback before running the conversion.
try:
    open(filename).close()
except IOError:
    print('ATTENTION: Data file not found, please check the filename.\n'
          ' (current value "%s")' % filename)
else:
    print('Data file found, you can proceed.')

# In case of file not found, please double check that you put the example
# data files in the "data" folder, or that the path you put in `filename`
# is correct. Please re-execute the last two cells until the file is found.
# ## Data file description
#
# Here we specify the additional metadata that will be stored
# in the Photon-HDF5 file. If you are using the example file you don't need
# to edit any of these. If using your own file, please modify these
# descriptions accordingly.

# ### Author
#
# These fields will go in the
# [identity](http://photon-hdf5.readthedocs.org/en/latest/phdata.html#identity-group)
# group:

# In[ ]:

author = 'Eitan Lerner'
author_affiliation = 'UCLA'
creator = 'Antonino Ingargiola'
creator_affiliation = 'UCLA'

# ### Sample
#
# These fields will go in the
# [sample](http://photon-hdf5.readthedocs.org/en/latest/phdata.html#sample-group)
# group:

# In[ ]:

# NOTE(review): fixed typo "demostrative" -> "demonstrative"; this string is
# stored verbatim in the output Photon-HDF5 file.
description = 'A demonstrative smFRET-nsALEX measurement.'
sample_name = 'Doubly-labeled ssDNA partially hybridized to a complementary strand.'
dye_names = ['ATTO488', 'ATTO647N']
buffer_name = 'Tris20 mM Ph 7.8'

# Please edit the previous cells and execute them (SHIFT+ENTER) to make sure
# there are no errors. Then proceed to the next section.

# ## Load the data
#
# Before loading the data we need to load a few library functions:

# In[ ]:

get_ipython().run_line_magic('matplotlib', 'inline')
import matplotlib.pyplot as plt
import numpy as np
import phconvert as phc
print('phconvert version: ' + phc.__version__)

# Next, we can load the input file and assign the measurement parameters
# ([measurement_specs](http://photon-hdf5.readthedocs.org/en/latest/phdata.html#measurement-specs)),
# necessary to create a complete Photon-HDF5 file.
#
# If using your own file, please review all the parameters in the next cell:
# detector IDs, alternation periods (in TCSPC bins) and wavelengths (in
# meters) must match your acquisition settings.

# In[ ]:

d, meta = phc.loader.nsalex_ht3(filename,
                                donor=0,
                                acceptor=1,
                                alex_period_donor=(150, 1500),
                                alex_period_acceptor=(1540, 3050),
                                excitation_wavelengths=(470e-9, 635e-9),
                                detection_wavelengths=(525e-9, 690e-9),
                                time_reversed=False)

# The next cell plots a `nanotimes` histogram for the donor and acceptor
# channel separately.
# The shaded areas mark the donor (*green*) and acceptor (*red*)
# excitation periods.
#
# If the histogram looks wrong in some aspects (no photons, wrong detectors
# assignment, wrong period selection) please go back to the previous cell
# and tweak the relevant parameters until the histogram looks correct.

# In[ ]:

fig, ax = plt.subplots(figsize=(10, 4))
phc.plotter.alternation_hist(d, ax=ax)

# You may also find it useful to see how many different detectors are
# present and their number of photons. This information is shown in the
# next cell.

# In[ ]:

detectors = d['photon_data']['detectors']
# Per-detector photon counts, printed as a small fixed-width table.
det_ids, det_counts = np.unique(detectors, return_counts=True)
print("Detector Counts")
print("-------- --------")
for det_id, n_photons in zip(det_ids, det_counts):
    print("%8d %8d" % (det_id, n_photons))

# # File conversion
#
# Once you finished editing the previous sections you can proceed with
# the actual conversion. It is suggested to execute the notebook in
# one step by clicking on the menu *Cells* -> *Run All*.
#
# After that, you should find a new `.hdf5` file in the same folder of the
# input file. You can check its content by using
# [HDFView](https://www.hdfgroup.org/products/java/hdfview/).
#
# The cells below contain the code to convert the input file to Photon-HDF5.

# ## Add metadata

# In[ ]:

d['description'] = description
# dye_names must be byte strings for the HDF5 writer.
d['sample'] = {'sample_name': sample_name,
               'dye_names': [name.encode() for name in dye_names],
               'buffer_name': buffer_name,
               'num_dyes': len(dye_names)}
d['identity'] = {'author': author,
                 'author_affiliation': author_affiliation,
                 'creator': creator,
                 'creator_affiliation': creator_affiliation}

# ## Validate the Photon-HDF5 structure
#
# Before writing to disk, we assure the file structure follows the
# Photon-HDF5 format:

# In[ ]:

phc.hdf5.assert_valid_photon_hdf5(d)

# ## Save to Photon-HDF5
#
# This command saves the new file to disk.
# In[ ]:

# Write the Photon-HDF5 file; keep it open so we can append the raw
# PicoQuant metadata below.
phc.hdf5.save_photon_hdf5(d, close=False, overwrite=True)

# In[ ]:

#d['_data_file'].close()

# ## Save HT3 metadata
#
# Here we save a custom (*user*) group where we put all the metadata
# found in the input .HT3 file. The important metadata from the .HT3
# file is already saved in the standard Photon-HDF5 fields.
#
# Here we save the full original metadata in order to make sure that
# no information is lost during the conversion.

# In[ ]:

h5file = d['_data_file']
h5file.create_group('/', 'user')
pq_group = h5file.create_group('/user', 'picoquant')
# Store every non-empty metadata record as a table under /user/picoquant.
for meta_name, meta_value in meta.items():
    if np.size(meta_value) > 0:
        h5file.create_table(pq_group, meta_name, obj=meta_value)

# In[ ]:

#h5file.close()

# ## Print HDF5 file content
#
# Finally we print the file content to see what's inside the newly-created
# Photon-HDF5. Here we print the content of the root node:

# In[ ]:

phc.hdf5.print_children(h5file.root)

# And here we retrieve some information from the user group:

# In[ ]:

phc.hdf5.print_children(h5file.root.user.picoquant)

# In[ ]:

h5file.close()

# ## Load Photon-HDF5
#
# Finally we try to reload the file to check that there are no errors.

# In[ ]:

from pprint import pprint

# In[ ]:

# The File object keeps its filename attribute even after close().
filename = d['_data_file'].filename

# In[ ]:

h5data = phc.hdf5.load_photon_hdf5(filename)

# In[ ]:

phc.hdf5.dict_from_group(h5data.identity)

# In[ ]:

phc.hdf5.dict_from_group(h5data.setup)

# In[ ]:

pprint(phc.hdf5.dict_from_group(h5data.photon_data))

# If the next cell output shows "OK" then the execution is terminated.

# In[ ]:

print('OK')