sphire_beta20170901_patch20170906/ 0000775 0030616 0076400 00000000000 13154460510 016724 5 ustar stabrin Domain Users sphire_beta20170901_patch20170906/.DS_Store 0000775 0030616 0076400 00000020004 13171343421 020406 0 ustar stabrin Domain Users Bud1 spblob bp @ @ @ @ s r cbwspblob bplist00
]ShowStatusBar[ShowSidebar[ShowPathbar[ShowToolbar[ShowTabView_ContainerShowSidebar\WindowBounds\SidebarWidth_PreviewPaneVisibility _{{959, 97}, {1439, 1320}}})5AMYp} s r cdsclbool s r clsvPblob Qbplist00
AB
_viewOptionsVersion_showIconPreview_calculateAllSizesWcolumnsXtextSizeZsortColumnXiconSize_useRelativeDates
$).38=
ZidentifierUwidthYascendingWvisibleTname
\dateModified [dateCreated !
Tsizea %&
Tkinds *+
Ulabeld /0
WversionK 45
Xcomments, 9:^dateLastOpened>YdateAdded#@( #@0 . @ T \ e p y "$%&/5789BJLMNW`cden} D s r clsvpblob Wbplist00
EF
_viewOptionsVersion_showIconPreview_calculateAllSizesWcolumnsXtextSizeZsortColumnXiconSize_useRelativeDates
$).38=BXcommentsTname[dateCreatedTsizeUlabelTkindWversion^dateLastOpened\dateModified
UindexUwidthYascendingWvisible, !
%&*+
a /0
d 45
s 9:
K >? &
#@( #@0 . @ T \ e p y (*-./8:<=>GIKLMVXZ[\egijktvxyz H s r cvSrnlong E DSDB ` @ @ @ /0
d 45
s 9:
K >? &
#@( #@0 . @ T \ e p y (*-./8:<=>GIKLMVXZ[\egijktvxyz H s r cvSrnlong sphire_beta20170901_patch20170906/src/ 0000775 0030616 0076400 00000000000 13153766756 017537 5 ustar stabrin Domain Users sphire_beta20170901_patch20170906/src/.DS_Store 0000775 0030616 0076400 00000020004 13171343410 021173 0 ustar stabrin Domain Users Bud1 2bwspblob @ @ @ @ e m a n 2bwspblob bplist00
]ShowStatusBar[ShowSidebar[ShowPathbar[ShowToolbar[ShowTabView_ContainerShowSidebar\WindowBounds\SidebarWidth_PreviewPaneVisibility _{{959, 97}, {1439, 1320}}})5AMYp} e m a n 2dsclbool e m a n 2lsvPblob Qbplist00
AB
_viewOptionsVersion_showIconPreview_calculateAllSizesWcolumnsXtextSizeZsortColumnXiconSize_useRelativeDates
$).38=
ZidentifierUwidthYascendingWvisibleTname
\dateModified [dateCreated !
Tsizea %&
Tkinds *+
Ulabeld /0
WversionK 45
Xcomments, 9:^dateLastOpened>YdateAdded#@( #@0 . @ T \ e p y "$%&/5789BJLMNW`cden} D e m a n 2lsvpblob Wbplist00
EF
_viewOptionsVersion_showIconPreview_calculateAllSizesWcolumnsXtextSizeZsortColumnXiconSize_useRelativeDates
$).38=BXcommentsTname[dateCreatedTsizeUlabelTkindWversion^dateLastOpened\dateModified
UindexUwidthYascendingWvisible, !
%&*+
a /0
d 45
s 9:
K >? &
#@( #@0 . @ T \ e p y (*-./8:<=>GIKLMVXZ[\egijktvxyz H e m a n 2vSrnlong E DSDB ` @ @ @ *+
a /0
d 45
s 9:
K >? &
#@( #@0 . @ T \ e p y (*-./8:<=>GIKLMVXZ[\egijktvxyz H e m a n 2vSrnlong sphire_beta20170901_patch20170906/src/eman2/ 0000775 0030616 0076400 00000000000 13153767131 020525 5 ustar stabrin Domain Users sphire_beta20170901_patch20170906/src/eman2/.DS_Store 0000775 0030616 0076400 00000020004 13153777134 022213 0 ustar stabrin Domain Users Bud1 xbwspblob @ @ @ @ s p a r xbwspblob bplist00
]ShowStatusBar[ShowSidebar[ShowPathbar[ShowToolbar[ShowTabView_ContainerShowSidebar\WindowBounds\SidebarWidth_PreviewPaneVisibility _{{959, 97}, {1439, 1320}}})5AMYp} s p a r xdsclbool s p a r xlsvPblob Qbplist00
AB
_viewOptionsVersion_showIconPreview_calculateAllSizesWcolumnsXtextSizeZsortColumnXiconSize_useRelativeDates
$).38=
ZidentifierUwidthYascendingWvisibleTname
\dateModified [dateCreated !
Tsizea %&
Tkinds *+
Ulabeld /0
WversionK 45
Xcomments, 9:^dateLastOpened>YdateAdded#@( #@0 . @ T \ e p y "$%&/5789BJLMNW`cden} D s p a r xlsvpblob Wbplist00
EF
_viewOptionsVersion_showIconPreview_calculateAllSizesWcolumnsXtextSizeZsortColumnXiconSize_useRelativeDates
$).38=BXcomments^dateLastOpened[dateCreatedTsizeUlabelTkindWversionTname\dateModified
UindexUwidthYascendingWvisible, !%&*+
a /0
d 45
s 9:
K >?
&
#@( #@0 . @ T \ e p y (*,-.79;<=FHJKLUWYZ[dfhijsuwxy H s p a r xvSrnlong E DSDB ` @ @ @ *+
a /0
d 45
s 9:
K >?
&
#@( #@0 . @ T \ e p y (*,-.79;<=FHJKLUWYZ[dfhijsuwxy H s p a r xvSrnlong sphire_beta20170901_patch20170906/src/eman2/sparx/ 0000775 0030616 0076400 00000000000 13153777333 021667 5 ustar stabrin Domain Users sphire_beta20170901_patch20170906/src/eman2/sparx/.DS_Store 0000775 0030616 0076400 00000014004 13171343410 023335 0 ustar stabrin Domain Users Bud1 % b i ndsclbool clbool @ @ @ @ E % DSDB ` @ @ @ sphire_beta20170901_patch20170906/src/eman2/sparx/bin/ 0000775 0030616 0076400 00000000000 13153746301 022426 5 ustar stabrin Domain Users sphire_beta20170901_patch20170906/src/eman2/sparx/bin/sxgui_meridien.py 0000775 0030616 0076400 00000045373 13153746162 026037 0 ustar stabrin Domain Users #!/usr/bin/env python
# sxgui_meridien for analysing meridien outputs.
# Copyright (C) 2017 Thorsten Wagner (thorsten.wagner@mpi-dortmund.mpg.de)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see .
from PyQt4.QtCore import QObject, pyqtSignal, pyqtSlot, QThread, QString, QThreadPool, QTimer
from PyQt4 import QtCore, QtGui
from PyQt4.QtGui import QDialog, QGridLayout, QTreeWidget, QMessageBox, QFontMetrics
import matplotlib.pyplot as plt
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt4agg import NavigationToolbar2QT as NavigationToolbar
import os
import os.path
import json
import sys
import numpy as np
class DriverFileReader(QObject):
    """
    Worker object that reads meridien driver/tracker files off the GUI thread.

    Communication is purely signal based so the instance can live in a worker
    QThread: emit sig_readfsc / sig_readfolders to request work and listen on
    sig_sendfsc / sig_sendfolders for the results.
    """
    sig_readfsc = pyqtSignal(object)
    sig_readfolders = pyqtSignal(str)
    sig_sendfolders = pyqtSignal(object)
    sig_sendfsc = pyqtSignal(object, object, object, object)

    def __init__(self):
        super(DriverFileReader, self).__init__()
        # Self-connections let callers trigger work via signal emission,
        # which marshals the call onto this object's thread.
        self.sig_readfsc.connect(self.handle_read_fsc_triggered)
        self.sig_readfolders.connect(self.handle_read_refinement_folders)

    # BUGFIX: was @pyqtSlot(str) although the slot receives a *list* of
    # paths and the matching signal is declared with `object`.
    @pyqtSlot(object)
    def handle_read_fsc_triggered(self, paths):
        """
        Slot for reading fsc triggered.

        :param paths: list of run-folder paths (e.g. /my/path/main001)
        :return: None; emits sig_sendfsc with (fsc values, run names,
                 pixel sizes, box sizes)
        """
        # Run name = last two path components; run number = last 3 characters
        # of the folder name (mainXXX convention).
        runnames = [os.path.join(*word.split("/")[-2:]) for word in paths]
        runnumbers = [word[-3:] for word in paths]
        pixelsizes = []
        nnxos = []
        tracker_reader = TrackerFileReader()  # hoisted: one reader suffices
        for i, path in enumerate(paths):
            tracker_path = os.path.join(path, "Tracker_{0}.json".format(runnumbers[i]))
            pixel_size, nnxo = tracker_reader.read_pixelsize_and_nnxo(tracker_path)
            pixelsizes.append(pixel_size)
            nnxos.append(nnxo)
        self.sig_sendfsc.emit(self.read_fsc_values(paths), runnames, pixelsizes, nnxos)

    def read_fsc_values(self, paths):
        """
        Reads the fsc value from the driver.txt file

        :param paths: Paths to run-folders (eg. /my/path/main001)
        :return: list of 1D arrays with fsc values, one per run
        """
        data_of_runs = []
        for path in paths:
            # The driver file is named after the run number (last 3 chars).
            driver_path = "{0}/driver_{1}.txt".format(path, path[-3:])
            data_of_runs.append(np.genfromtxt(driver_path, usecols=0))
        return data_of_runs

    @pyqtSlot(str)
    def handle_read_refinement_folders(self, path):
        # Simply forwards the request; fill_tree on the GUI side does the work.
        self.sig_sendfolders.emit(path)

    @staticmethod
    def read_refinement_folders(path):
        """
        Reads the folder structure of a refinement folder

        :param path: Path to the refinement folder which contains the main folders
        :return: list of string with mainXXX folder names
        """
        main_folders = []
        # Renamed loop variable: `dict` shadowed the builtin.
        for entry in os.listdir(path):
            combined_path = os.path.join(str(path), entry)
            # Skip main000 (initial state) and anything that is not a mainXXX dir.
            if os.path.isdir(combined_path) and ("main" in entry) and ("000" not in entry):
                # Only accept runs that already wrote a Tracker file.
                if any("Tracker" in file_name for file_name in os.listdir(combined_path)):
                    main_folders.append(entry)
        return main_folders
class FSCPlot(QDialog):
    """Dialog embedding a matplotlib canvas that shows FSC curves of runs."""

    def __init__(self, parent=None):
        super(FSCPlot, self).__init__(parent)
        self.figure = plt.figure()
        # this is the Canvas Widget that displays the `figure`
        # it takes the `figure` instance as a parameter to __init__
        self.canvas = FigureCanvas(self.figure)
        # this is the Navigation widget
        # it takes the Canvas widget and a parent
        self.toolbar = NavigationToolbar(self.canvas, self)
        # set the layout
        layout = QtGui.QVBoxLayout()
        layout.addWidget(self.toolbar)
        layout.addWidget(self.canvas)
        self.setLayout(layout)

    def plot(self, fscvalues, names, pixelsizes, boxsizes):
        """
        Plots the FSC curves for several runs

        :param fscvalues: nd-array with fsc values. Row corresponds to a run.
        :param names: list of names of each run. These names are shown as legend.
        :param pixelsizes: list of used pixelsize for each run
        :param boxsizes: list of the used boxsize for each run
        :return: none, just plotting..
        """
        if not fscvalues:
            return
        # create an axis
        ax = self.figure.add_subplot(111)
        for i, entry in enumerate(fscvalues):
            # Convert Fourier shell index to spatial frequency:
            # shell / (pixelsize * boxsize) [1/A].
            fsc_x = np.arange(len(entry), dtype=float) / float(pixelsizes[i] * boxsizes[i])
            ax.plot(fsc_x, entry, label=names[i])
        # BUGFIX: decorate via this figure's axes instead of the pyplot global
        # state, which targets the most recently *created* figure and would
        # draw into the wrong window once several plot dialogs exist.
        ax.grid()
        ax.set_title("Fourier shell correlation curves")
        ax.set_ylabel("FSC")
        ax.set_xlabel("Spatial frequency / 1/A")
        ax.legend()
        ax.set_ylim(bottom=-0.05)
        self.canvas.draw()
        self.show()
class MonitorRefinementFolder(QObject):
    """
    Watches a single refinement folder for newly finished main### runs.

    MainWindow drives :meth:`update` periodically via a QTimer; results are
    reported back through the caller-supplied ``sig_update_tree`` signal.
    """

    def __init__(self, path, sig_update_tree, parent=None):
        # :param path: refinement folder to watch
        # :param sig_update_tree: pyqtSignal(item, folder, delete) owned by the caller
        super(MonitorRefinementFolder, self).__init__(parent)
        self.refinement_folder = path
        self.reader = DriverFileReader()
        # Snapshot of the main### folders seen so far; update() diffs against it.
        self.mainDicts = set(self.reader.read_refinement_folders(path))
        self.sig_update_tree = sig_update_tree

    @pyqtSlot()
    def update(self):
        '''
        Checks if there is a new main folder in a specified refinement folder and updates the QTree. If the
        refinement folder was deleted, the corresponding element in the QTree is deleted.
        :return:
        '''
        if os.path.exists(self.refinement_folder):
            current_dictionaries = set(self.reader.read_refinement_folders(self.refinement_folder))
            new_dictionaries = current_dictionaries.difference(self.mainDicts)
            if len(new_dictionaries) >= 1:
                list_new_dictionaries = list(new_dictionaries)
                for dictionary in list_new_dictionaries:
                    # Only report runs whose driver and Tracker files are already on disk.
                    if os.path.isfile("{0}/{1}/driver_{2}.txt".format(self.refinement_folder, dictionary, dictionary[-3:])) and \
                            os.path.isfile("{0}/{1}/Tracker_{2}.json".format(self.refinement_folder, dictionary, dictionary[-3:])):
                        next_item = QtGui.QTreeWidgetItem([dictionary])
                        next_item.setCheckState(0, QtCore.Qt.Unchecked)
                        delete=False
                        self.sig_update_tree.emit(next_item, self.refinement_folder, delete)
                        # NOTE(review): snapshot is refreshed only after a run was
                        # reported; runs still missing their files are re-checked
                        # on the next timer tick — confirm this placement matches
                        # the original indentation.
                        self.mainDicts = current_dictionaries
        else:
            # Folder vanished: ask the GUI to drop the tree entry and detach
            # this monitor so it can be garbage collected.
            delete = True
            self.sig_update_tree.emit(None, self.refinement_folder, delete)
            self.setParent(None)
class ResolutionOverviewPlot(QDialog):
    """Dialog plotting the reached resolution (FSC 0.5 / 0.143) over all runs."""

    def __init__(self, parent=None):
        super(ResolutionOverviewPlot, self).__init__(parent)
        self.figure = plt.figure()
        # this is the Canvas Widget that displays the `figure`
        # it takes the `figure` instance as a parameter to __init__
        self.canvas = FigureCanvas(self.figure)
        # this is the Navigation widget
        # it takes the Canvas widget and a parent
        self.toolbar = NavigationToolbar(self.canvas, self)
        # set the layout
        layout = QtGui.QVBoxLayout()
        layout.addWidget(self.toolbar)
        layout.addWidget(self.canvas)
        self.setLayout(layout)

    def plot(self, resolutions_05, resolution_0143):
        """
        Plots the resolution over all runs for FSC 0.5 and FSC 0.143

        :param resolutions_05: list of spatial resolution with FSC 0.5
        :param resolution_0143: list of spatial resolution with FSC 0.143
        :return: none
        """
        if not resolutions_05 and not resolution_0143:
            return
        # create an axis
        ax = self.figure.add_subplot(111)
        # x axis is simply the run index, starting at 1.
        ax.plot(range(1, len(resolution_0143) + 1), resolution_0143, "x-",
                label="Resolution FSC 0.143")
        ax.plot(range(1, len(resolutions_05) + 1), resolutions_05, "x-",
                label="Resolution FSC 0.5")
        # BUGFIX: decorate via this figure's axes instead of the pyplot global
        # state, which may target another (more recently created) figure when
        # several plot dialogs are open.
        ax.grid()
        ax.set_title("Resolution curves")
        ax.set_ylabel("Resolution [A]")
        ax.set_xlabel("Runs")
        ax.legend()
        self.canvas.draw()
        self.show()
class TrackerFileReader:
    """Extracts pixel size, box size and resolution values from Tracker_*.json files."""

    def __init__(self):
        pass

    def read_pixelsize_and_nnxo(self, path):
        """
        :param path: path to a Tracker_*.json file
        :return: [pixel_size, nnxo] — pixel size and box size of the run
        """
        with open(path, 'r') as json_file:
            tracker = json.load(json_file)
        constants = tracker['constants']
        return [constants['pixel_size'], constants['nnxo']]

    def _read_res0143_and_res05(self, path):
        """
        :param path: path to a Tracker_*.json file
        :return: [res_0143, res_05] as Fourier shell indices
        """
        with open(path, 'r') as json_file:
            tracker = json.load(json_file)
        return [tracker['fsc143'], tracker['currentres']]

    def read_res0143_and_res05(self, path):
        """
        :param path: Path to refinement folder
        :return: [FSC-0.143 resolutions, FSC-0.5 resolutions], one entry per run,
                 in Angstrom
        """
        res_0143 = []
        res_05 = []
        for main_folder in DriverFileReader.read_refinement_folders(path):
            tracker_path = "{0}/{1}/Tracker_{2}.json".format(path, main_folder, main_folder[-3:])
            if not os.path.isfile(tracker_path):
                continue
            pixelsize, boxsize = self.read_pixelsize_and_nnxo(tracker_path)
            shell_0143, shell_05 = self._read_res0143_and_res05(tracker_path)
            # Convert Fourier shell index to resolution in Angstrom.
            res_0143.append(float(pixelsize * boxsize) / float(shell_0143))
            res_05.append(float(pixelsize * boxsize) / float(shell_05))
        return [res_0143, res_05]
class MainWindow(QtGui.QMainWindow):
    """
    Main application window.

    Shows each opened refinement folder as a top-level entry of a tree, with
    its main### runs as checkable children, and offers FSC and resolution
    overview plots for the checked runs. File reading happens in a worker
    thread (DriverFileReader); folder changes are detected by
    MonitorRefinementFolder instances driven by a QTimer.
    """
    # (item, refinement_folder, delete) — consumed by update_tree.
    sig_update_tree = pyqtSignal(object, object, object)
    sig_show_overview_plot = pyqtSignal()

    def __init__(self, font, parent=None):
        # :param font: application font, used to size the tree to the folder path
        super(MainWindow, self).__init__(parent)
        self.font = font
        self.setWindowTitle("Meridien")
        central_widget = QtGui.QWidget(self)
        self.setCentralWidget(central_widget)
        #Center on screen
        resolution = QtGui.QDesktopWidget().screenGeometry()
        self.move((resolution.width() / 2) - (self.frameSize().width() / 2),
                  (resolution.height() / 2) - (self.frameSize().height() / 2))
        """
        Setting up menu bar
        """
        close_action = QtGui.QAction('Close', self)
        close_action.setShortcut("Ctrl+Q")
        close_action.setStatusTip('Leave the app')
        close_action.triggered.connect(lambda: self.close())
        open_refinement_folder = QtGui.QAction('Open Refinement Folder', self)
        open_refinement_folder.triggered.connect(self.open_refinement_folder)
        self.mainMenu = self.menuBar()
        self.fileMenu = self.mainMenu.addMenu('&File')
        self.fileMenu.addAction(open_refinement_folder)
        self.fileMenu.addAction(close_action)
        self.refinement_folder = ""
        create_new_fsc_plot = QtGui.QAction('&New FSC plot', self)
        create_new_fsc_plot.triggered.connect(self.event_ontriggered_show_fsc_plot)
        create_new_overview_plot = QtGui.QAction('&New resolution overview plot', self)
        create_new_overview_plot.triggered.connect(self.event_show_resolution_overview_plot)
        self.plotMenu = self.mainMenu.addMenu('&Plot')
        self.plotMenu.addAction(create_new_fsc_plot)
        self.plotMenu.addAction(create_new_overview_plot)
        """
        Setup other components
        """
        self.layout = QGridLayout(central_widget)
        self.setMenuBar(self.mainMenu)
        self.tree = QTreeWidget(self)
        self.tree.setHeaderHidden(True)
        self.layout.addWidget(self.tree, 1, 0)
        # Maps refinement-folder path -> its top-level QTreeWidgetItem.
        self.root_items_path_dictionary = {}
        # Threads
        self.threadpool = QThreadPool()
        self.thread_list = []
        thr = QThread(self)
        thr.start()
        self.reader = DriverFileReader()
        # Move the reader into the worker thread so file I/O never blocks the GUI.
        self.reader.moveToThread(thr)
        self.thread_list.append(thr)
        self.timer = QTimer(self)
        # Connect signals
        self.reader.sig_sendfolders.connect(self.fill_tree)
        self.reader.sig_sendfsc.connect(self.show_dialog_fsc)
        self.tree.itemChanged.connect(self._event_select_deselect_all)
        self.sig_update_tree.connect(self.update_tree)
        self.sig_show_overview_plot.connect(self.event_show_resolution_overview_plot)
        self.show()
        # Immediately ask the user for a folder on startup.
        self.open_refinement_folder()
        self.monitor = None

    @pyqtSlot(object, object, object)
    def update_tree(self, item, monitored_folder, delete):
        '''
        :param item: main folder to add to tree item belonging to folder montiored_folder
        :param monitored_folder: Refinement folder
        :param delete: if this is true, it will delete the monitored folder
        :return:
        '''
        if delete:
            # The whole refinement folder disappeared: remove its tree entry.
            root = self.root_items_path_dictionary[monitored_folder]
            index = self.tree.indexOfTopLevelItem(root)
            self.tree.takeTopLevelItem(index)
        else:
            root = self.root_items_path_dictionary[monitored_folder]
            root.addChild(item)
            # Notify the user that data of a new run appeared on disk.
            msg = QMessageBox()
            msg.setIcon(QMessageBox.Information)
            msg.setText("Meridien - There is data of a new run: " + str(item.text(0)))
            msg.setWindowTitle("Update")
            msg.exec_()

    @pyqtSlot(object, object, object, object)
    def show_dialog_fsc(self, *args):
        """
        Shows fsc plot
        :param args: list of arguments (fsc values, run names, pixel sizes, box sizes)
        :return: None
        """
        fsc_plot = FSCPlot(self)
        fsc_plot.plot(*args)

    @pyqtSlot()
    def event_show_resolution_overview_plot(self):
        """
        Trigger for the resolution overview plot of the current refinement folder.
        :return: None
        """
        tracker_reader = TrackerFileReader()
        res_0143, res_05 = tracker_reader.read_res0143_and_res05(self.refinement_folder)
        res_plot = ResolutionOverviewPlot(parent=self)
        res_plot.setWindowTitle(self.refinement_folder)
        res_plot.plot(res_05, res_0143)

    def event_ontriggered_show_fsc_plot(self):
        """
        Trigger for fsc plot
        :return: none
        """
        # Collect the checked runs and hand the paths to the worker thread;
        # the result arrives asynchronously via sig_sendfsc.
        checked_runs = self._get_checked_items()
        paths = []
        for checked_run in checked_runs:
            refinement_folder = checked_run.parent().text(0)
            path = os.path.join(str(refinement_folder), str(checked_run.text(0)))
            paths.append(path)
        self.reader.sig_readfsc.emit(paths)

    def _event_select_deselect_all(self, root_tree_item):
        '''
        Checks all childs of root_tree_item
        :param root_tree_item: QTreeWidgetItem which represents a refinement folder
        :return:
        '''
        # Only react to top-level (refinement folder) items; propagate their
        # check state to every child run.
        if root_tree_item in self.root_items_path_dictionary.values():
            number_of_runs = root_tree_item.childCount()
            for i in range(number_of_runs):
                child_run = root_tree_item.child(i)
                child_run.setCheckState(0, root_tree_item.checkState(0))

    def _get_checked_items(self):
        """
        Finds the checked elements in tree
        :return: List of checked QItemWidget
        """
        checked_runs = []
        for root in self.root_items_path_dictionary.values():
            number_of_runs = root.childCount()
            for i in range(number_of_runs):
                main_run = root.child(i)
                if main_run.checkState(0) == QtCore.Qt.Checked:
                    checked_runs.append(main_run)
        return checked_runs

    def open_refinement_folder(self):
        """
        Let the user choose the refinement folder and adds it to the RefinementFolder-Tree
        :return: none
        """
        self.refinement_folder = str(QtGui.QFileDialog.getExistingDirectory(self, "Select Refinement Directory"))
        if self.refinement_folder == "":
            return
        # Already monitored — nothing to do.
        if self.refinement_folder in self.root_items_path_dictionary:
            return
        self._open_refinement_folder(self.refinement_folder)
        self.sig_show_overview_plot.emit()

    def _open_refinement_folder(self, path):
        """
        Reads the refinement folder, setup the folder daemon and signals
        :param path: Path to refinement folder
        """
        if path != '':
            #for i in reversed(range(self.root.childCount())):
            #    self.root.removeChild(self.root.child(i))
            name = os.path.basename(path)
            # NOTE(review): qname is unused; presumably kept for the
            # commented-out setText call below.
            qname = QString(name)
            root = QtGui.QTreeWidgetItem([str(path)])
            self.root_items_path_dictionary[str(path)] = root
            self.tree.addTopLevelItem(root)
            # Widen the tree so the full folder path is readable.
            fm = QFontMetrics(self.font)
            w = fm.width(path)
            self.tree.setMinimumWidth(w+150)
            #self.root.setText(0, qname)
            self.reader.sig_readfolders.emit(path)
            # Poll the folder for new runs every 2 seconds.
            self.monitor = MonitorRefinementFolder(path, self.sig_update_tree,self)
            self.timer = QTimer(self)
            self.timer.timeout.connect(self.monitor.update)
            self.timer.start(2000)

    def closeEvent(self, close_event):
        '''
        Closes all threads.
        '''
        for thr in self.thread_list:
            thr.quit()
            thr.wait()

    @pyqtSlot(object)
    def fill_tree(self, path_to_refinement_folder):
        '''
        Reads all runs in path_to_refinement_folder and add them as child to the corresponding root element in the tree
        :param path_to_refinement_folder: Path to refinement folder
        :return: none
        '''
        root = self.root_items_path_dictionary[str(path_to_refinement_folder)]
        root.setCheckState(0,QtCore.Qt.Unchecked)
        main_dicts = DriverFileReader.read_refinement_folders(path_to_refinement_folder)
        for dictionary in main_dicts:
            next_item = QtGui.QTreeWidgetItem([dictionary])
            next_item.setCheckState(0, QtCore.Qt.Unchecked)
            root.addChild(next_item)

    def close_application(self):
        '''
        Close the application
        :return: none
        '''
        sys.exit()
def run(args=None):
    """Create the Qt application, show the Meridien window and enter the event loop."""
    application = QtGui.QApplication(sys.argv)
    # Keep a reference so the window is not garbage collected.
    main_window = MainWindow(application.font())
    sys.exit(application.exec_())
# Script entry point: launch the GUI when executed directly.
if __name__ == '__main__':
    run()
sphire_beta20170901_patch20170906/src/eman2/sparx/bin/sxgui.py 0000775 0030616 0076400 00001347547 13153776427 024203 0 ustar stabrin Domain Users #!/usr/bin/env python
#
# Authors:
# Toshio Moriya, 11/11/2015 (toshio.moriya@mpi-dortmund.mpg.de)
# Markus Stabrin, 09/06/2016 (markus.stabrin@mpi-dortmund.mpg.de)
#
# This software is issued under a joint BSD/GNU license. You may use the
# source code in this file under either license. However, note that the
# complete EMAN2 and SPHIRE software packages have some GPL dependencies,
# so you are responsible for compliance with the licenses of these packages
# if you opt to use BSD licensing. The warranty disclaimer below holds
# in either instance.
#
# This complete copyright notice must be included in any revised version of the
# source code. Additional authorship citations may be added, but existing
# author citations must be preserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
#
import sys
import os
from subprocess import *
from functools import partial # Use to connect event-source widget and event handler
from PyQt4.Qt import *
from PyQt4 import QtGui
from PyQt4 import QtCore
from EMAN2 import *
from EMAN2_cppwrap import *
from global_def import *
from sparx import *
# ========================================================================================
# Helper Functions
#
# This function is added here because db_convert_path in EMAN2db.py has a bug.
#
def translate_to_bdb_path(std_path):
    '''
    Translate a standard file path (std_path) to bdb syntax (return value).
    The path passed must contain at least EMAN2DB directory and .bdb file name.
    For instance, if the path is particles/EMAN2DB/data.bdb,
    will return bdb:particles#data.

    :param std_path: standard file path ending in .../EMAN2DB/<name>.bdb
    :return: the equivalent "bdb:..." path string
    :raises RuntimeError: if std_path is not a string
    :raises ValueError: if the path lacks the EMAN2DB directory or .bdb name
    '''
    # Check error conditions
    if not isinstance(std_path, str):
        raise RuntimeError("Path has to be a string")
    path_tokens = std_path.split("/")
    if len(path_tokens) < 2:
        raise ValueError("Invalid file path. The path pass must contain at least \'EMAN2DB\' directory and \'.bdb\' file name (e.g \'./EMAN2DB/data.bdb\'). ")
    if path_tokens[-2] != "EMAN2DB":
        raise ValueError("Invalid file path. The path pass must contain \'EMAN2DB\' directory (e.g \'./EMAN2DB/data.bdb\').")
    if os.path.splitext(path_tokens[-1])[1] != ".bdb":
        raise ValueError("Path is invalid. The path pass must contain \'.bdb\' file name (e.g \'./EMAN2DB/data.bdb\').")
    # Directory part is everything before .../EMAN2DB/<name>.bdb.
    # (Replaces a manual xrange loop — works on both Python 2 and 3 — and
    # renames `dir`, which shadowed the builtin.)
    dir_path = "/".join(path_tokens[:-2])
    # For an absolute input path make sure the directory part is absolute too.
    # BUGFIX: the old `dir[0] != "/"` check raised IndexError for paths
    # directly under root such as "/EMAN2DB/data.bdb".
    if std_path[0] == "/" and not dir_path.startswith("/"):
        dir_path = "/" + dir_path
    # Add '#' before the database name (file basename without extension)
    bdb_path = "bdb:"
    if dir_path != "":
        bdb_path += dir_path + "#"
    # Finally, add file basename (without .bdb extension)
    bdb_path += os.path.splitext(path_tokens[-1])[0]
    return bdb_path
# ========================================================================================
# Inherited by SXcmd_category and SXconst_set
# SXMainWindow use this class to handle events from menu item buttons
class SXmenu_item(object):
    """
    Base record for one entry of the SPHIRE GUI main menu.

    Inherited by SXcmd_category and SXconst_set; SXMainWindow uses it to
    handle events from menu item buttons.
    """
    def __init__(self, name = "", label = "", short_info = ""):
        # Key used to look this item up in dictionaries.
        self.name = name
        # Human readable title shown in the GUI.
        self.label = label
        # One-line description of this item.
        self.short_info = short_info
        # Filled in later by the GUI layer:
        self.btn = None     # associated QPushButton instance
        self.widget = None  # associated SXCmdWidget instance
# ========================================================================================
class SXcmd_token(object):
    """One argument or option of an sx* command line."""
    def __init__(self):
        self.key_base = ""          # key base name in the command line
        self.key_prefix = ""        # None for argument, "--" or "-" for option
        self.label = ""             # user friendly name of argument or option
        self.help = ""              # help text
        self.group = ""             # tab group: main or advanced
        self.is_required = False    # required -> no default value available
        self.is_locked = False      # the restore value is used as the locked value
        self.default = ""           # default value
        self.restore = ""           # restore value
        self.type = ""              # type of value
        # Used to check consistency between "usage in command line" and the
        # "== Input ==" / "== Output ==" section lists.
        self.is_in_io = False
        self.restore_widget = None  # restore widget associated with this token
        self.widget = None          # widget associated with this token

    def initialize_edit(self, key_base):
        """Reset the token for editing: keep only the key base, blank the rest."""
        self.key_base = key_base
        for attribute in ("key_prefix", "label", "help", "group", "is_required",
                          "is_locked", "default", "restore", "type"):
            setattr(self, attribute, None)
# ========================================================================================
class SXcmd(object):
    """Description of a single sx* command exposed through the GUI."""
    def __init__(self, category = "", role = "", is_submittable = True):
        self.name = ""              # sx*.py script name without the .py extension
        self.subname = ""           # subname, used when generating the command line
        self.mode = ""              # token key base defining the mode/subset ("" = fullset)
        self.label = ""             # user friendly name of this command
        self.short_info = ""        # short description
        self.mpi_support = False    # command supports an MPI version
        self.mpi_add_flag = False   # needs an explicit --MPI flag (legacy scripts)
        self.category = category    # e.g. sxc_movie, sxc_meridien, sxc_utilities
        self.role = role            # sxr_pipe (pipeline), sxr_alt (alternative), sxr_util (utility)
        self.is_submittable = is_submittable  # False for external GUI apps
        self.token_list = []        # command tokens, kept in order
        self.token_dict = {}        # command tokens keyed by key base name
        self.btn = None             # associated QPushButton instance
        self.widget = None          # associated SXCmdWidget instance

    def get_mode_name_for(self, target_name):
        """
        Return the command name decorated with subname/mode for the given target:
        "file_path" -> underscore separated, "human" -> space separated.
        """
        mode_name = self.name
        if self.subname != "":
            if target_name in ["file_path"]:
                mode_name = "{0}_{1}".format(mode_name, self.subname)
            elif target_name in ["human"]:
                mode_name = "{0} {1}".format(mode_name, self.subname)
        if self.mode != "":
            if target_name in ["file_path"]:
                mode_name = "{0}_{1}".format(mode_name, self.mode)
            elif target_name in ["human"]:
                prefix = self.token_dict[self.mode].key_prefix
                mode_name = "{0} {1}{2}".format(mode_name, prefix, self.mode)
        return mode_name

    def get_category_dir_path(self, parent_dir_path = ""):
        """Directory name of this command's category, optionally joined under parent_dir_path."""
        category_dir_path = self.category.replace("sxc_", "")
        if parent_dir_path == "":
            return category_dir_path
        return os.path.join(parent_dir_path, category_dir_path)
# ========================================================================================
class SXcmd_category(SXmenu_item):
    """
    Menu item grouping the sx* commands of one category
    (sxc_movie, sxc_cter, sxc_window, sxc_isac, sxc_viper, sxc_meridien,
    sxc_sort3d, sxc_localres, sxc_utilities).

    name/label/short_info/btn/widget are inherited from SXmenu_item.
    """
    def __init__(self, name = "", label = "", short_info = ""):
        super(SXcmd_category, self).__init__(name, label, short_info)
        # Commands of this category, kept in display order.
        self.cmd_list = []
# ========================================================================================
class SXconst(object):
    """One project-wide constant parameter (e.g. pixel size)."""
    def __init__(self):
        self.key = ""                # key of the constant parameter
        self.label = ""              # user friendly name
        self.help = ""               # help text
        self.register = ""           # default value
        self.type = ""               # type of value
        self.register_widget = None  # register widget associated with this constant
        self.widget = None           # widget associated with this constant
# ========================================================================================
class SXconst_set(SXmenu_item):
    """
    Menu item holding the full set of project constant parameters.

    name/label/short_info/btn/widget are inherited from SXmenu_item.
    """
    def __init__(self):
        super(SXconst_set, self).__init__()
        self.list = []  # constant parameters, kept in display order
        self.dict = {}  # constant parameters keyed by their key
# ========================================================================================
class SXLookFeelConst(object):
	"""Static namespace of look-and-feel constants for the SPHIRE GUI.

	Never instantiated.  Screen-dependent members start as invalid
	placeholders (-1 / "") and must be filled in by initialise() once the
	QApplication exists.
	"""
	# static class variables
	default_bg_color = QColor(229, 229, 229, 192) # default_bg_color = QColor(229, 229, 229, 242) # Greyish-White Transparent
	default_bg_color_string = 'rgba(229, 229, 229, 192)' # default_bg_color = QColor(229, 229, 229, 242) # Greyish-White Transparent
	sxinfo_widget_bg_color = QColor(0, 0, 0, 10) # Almost-Completely Transparent
	sxcmd_widget_bg_color = QColor(0, 0, 0, 0) # Completely Transparent
	sxcmd_tab_bg_color = QColor(229, 229, 229, 200) # White Transparent
	sxcmd_tab_bg_color_string = 'rgba(229, 229, 229, 200)' # White Transparent
	# Constants
	project_dir = "sxgui_settings" # directory where per-command GUI settings files are written
	sxmain_window_left = 0
	sxmain_window_top = 0
	sxmain_window_min_width = 1500 # Requirement of specification
	sxmain_window_min_height = 360 # Requirement of specification
	expected_cmd_counts = 32
	grid_margin = 6 # grid_margin = 12
	grid_spacing = 6
	# Constants initialised with invalid values.
	# Valid values should be set by initialise() function
	screen_height = -1
	screen_width = -1
	sxmain_window_width = -1
	sxmain_window_height = -1
	sxmenu_item_btn_width = -1
	grid_distance = -1
	sxmenu_btn_area_min_width = -1
	sxcmd_btn_area_min_width = -1
	sxcmd_widget_area_min_width = -1
	file_dialog_dir = ""
	@staticmethod
	def initialise(sxapp):
		"""Compute the screen-dependent constants from the running application.

		sxapp: the QApplication instance (used to query desktop geometry).
		Picks the monitor with the largest height and sizes the main window
		from it, clamped by the min-width/min-height requirements.
		"""
		# Set the directory for all file dialogs to script directory
		SXLookFeelConst.file_dialog_dir = os.getcwd()
		monitor_index = 0
		# Search for maximum screen height and set it to SXLookFeelConst singleton class
		max_screen_height = sxapp.desktop().screenGeometry().height()
		for index in range(sxapp.desktop().screenCount()):
			screen_height = sxapp.desktop().screenGeometry(index).height()
			if max_screen_height < screen_height:
				monitor_index = index
				max_screen_height = screen_height
		SXLookFeelConst.screen_height = max_screen_height
		# Search for maximum screen width and set it to SXLookFeelConst singleton class
		SXLookFeelConst.screen_width = sxapp.desktop().screenGeometry(monitor_index).width()
		# Set size of the main window depending on the screen size
		# NOTE(review): "/" here is integer division under Python 2; confirm
		# before any Python 3 port, where it would yield floats.
		SXLookFeelConst.sxmain_window_height = SXLookFeelConst.screen_height / 2
		if SXLookFeelConst.sxmain_window_height <= SXLookFeelConst.sxmain_window_min_height:
			SXLookFeelConst.sxmain_window_height = SXLookFeelConst.sxmain_window_min_height
		SXLookFeelConst.sxmain_window_width = SXLookFeelConst.sxmain_window_min_width
		# Clamp the window width to 3/4 of the screen width, but never below 960 px
		if SXLookFeelConst.sxmain_window_width >= SXLookFeelConst.screen_width * 3 / 4:
			SXLookFeelConst.sxmain_window_width = SXLookFeelConst.screen_width * 3 / 4
			if SXLookFeelConst.sxmain_window_width < 960:
				SXLookFeelConst.sxmain_window_width = 960
		# SXLookFeelConst.sxmain_window_height = SXLookFeelConst.screen_height / 2
		# SXLookFeelConst.sxmain_window_width =SXLookFeelConst.sxmain_window_min_width
		# Derive button and layout geometry from the main window height
		SXLookFeelConst.sxmenu_item_btn_width = SXLookFeelConst.sxmain_window_height * 0.125
		SXLookFeelConst.grid_distance = SXLookFeelConst.sxmenu_item_btn_width / 10
		SXLookFeelConst.sxmenu_btn_area_min_width = 2 * SXLookFeelConst.sxmenu_item_btn_width + SXLookFeelConst.grid_distance + 18
		SXLookFeelConst.sxcmd_btn_area_min_width = 240
		SXLookFeelConst.sxcmd_widget_area_min_width = SXLookFeelConst.sxmain_window_width - SXLookFeelConst.sxmenu_btn_area_min_width - SXLookFeelConst.sxcmd_btn_area_min_width
	@staticmethod
	def format_path(path):
		"""Return path relative to the project root (cwd); keep the original
		path when it points outside the project tree."""
		formatted_path = os.path.relpath(path)
		if formatted_path[:len("../")] == "../":
			# if the path is above the project root directory (current directory)
			# use absolute path
			formatted_path = path
		# else:
		#   if the path is project subdirectory
		#   use relative path
		return formatted_path
	@staticmethod
	def generate_sxcmd_wiki_url(sxcmd, wiki_type = "SPHIRE"):
		"""Build the documentation wiki URL for a command.

		sxcmd: object with name/subname/category attributes.
		wiki_type: "SPHIRE" (default) or "SPARX"; anything else trips the assert.
		"""
		if wiki_type == "SPHIRE":
			# First, handle exceptional cases: these commands are documented under "utilities"
			if sxcmd.name in ["sxprocess", "sxsummovie", "e2bdb", "e2proc3d", "e2display", "sxpdb2em"] :
				sxcmd_category_name = "utilities"
			else:
				sxcmd_category_name = sxcmd.category.replace("sxc_", "")
			# URL Format: "http://sphire.mpg.de/wiki/doku.php?id=pipeline:CMD_CATEGORY:CMD_BASE
			sxcmd_wiki_url = "http://sphire.mpg.de/wiki/doku.php?id=pipeline:%s:%s" % (sxcmd_category_name, sxcmd.name)
			if sxcmd.subname != "":
				sxcmd_wiki_url = "%s_%s" % (sxcmd_wiki_url, sxcmd.subname)
		else:
			assert (wiki_type == "SPARX")
			sxcmd_wiki_url = "%s%s" % (SPARX_DOCUMENTATION_WEBSITE, sxcmd.name)
			if sxcmd.subname != "":
				sxcmd_wiki_url = "%s_%s" % (sxcmd_wiki_url, sxcmd.subname)
		return sxcmd_wiki_url
	@staticmethod
	def generate_sxmenu_item_wiki_url(sxmenu_item):
		"""Build the wiki "start" page URL for a menu item (command category)."""
		# URL Format: "http://sphire.mpg.de/wiki/doku.php?id=pipeline:CMD_CATEGORY:start"
		sxmenu_item_wiki_url = "http://sphire.mpg.de/wiki/doku.php?id=pipeline:%s:start" % (sxmenu_item.name.replace("sxc_", ""))
		return sxmenu_item_wiki_url
# ========================================================================================
class SXLogoButton(QPushButton):
	"""Large logo button (SPHIRE logo) shown at the bottom of the menu column."""
	def __init__(self, logo_file_path, parent = None):
		"""logo_file_path: image file used as the button face (via stylesheet url())."""
		super(SXLogoButton, self).__init__(parent)
		# print "MRK_DEBUG: logo_file_path = %s" % logo_file_path
		# print "MRK_DEBUG: os.path.exists(logo_file_path) %s" % os.path.exists(logo_file_path)
		# Width of logo image: spans both columns of the pictogram grid
		logo_width = SXLookFeelConst.sxmenu_item_btn_width * 2 + SXLookFeelConst.grid_distance
		# Style of widget; 0.434 matches the logo image's aspect ratio
		# (presumably -- TODO confirm against the actual image dimensions)
		self.setFixedSize(logo_width, 0.434 * logo_width)
		# NOTE(review): customButtonStyle and customButtonStyleClicked are
		# identical here -- presumably intentional because the logo button has
		# no visual "clicked" state; confirm before unifying them.
		self.customButtonStyle = """
			SXLogoButton {{background-color: rgba(0, 0, 0, 0); border: 0px solid black; border-radius: 0px; image: url("{0}");}}
			SXLogoButton:focus {{background-color: rgba(0, 0, 0, 0); border: 0px solid grey; border-radius: 0px; image: url("{0}");}}
			SXLogoButton:pressed {{background-color: rgba(0, 0, 0, 0); border: 0px solid red; border-radius: 0px; image: url("{0}");}}
			""".format(logo_file_path)
		self.customButtonStyleClicked = """
			SXLogoButton {{background-color: rgba(0, 0, 0, 0); border: 0px solid black; border-radius: 0px; image: url("{0}");}}
			SXLogoButton:focus {{background-color: rgba(0, 0, 0, 0); border: 0px solid grey; border-radius: 0px; image: url("{0}");}}
			SXLogoButton:pressed {{background-color: rgba(0, 0, 0, 0); border: 0px solid red; border-radius: 0px; image: url("{0}");}}
			""".format(logo_file_path)
		# Set style and add click event
		self.setStyleSheet(self.customButtonStyle)
		# Add ToolTip
		self.setToolTip('HELP')
# ========================================================================================
class SXPictogramButton(QPushButton):
	"""Square pictogram button used for command-category menu entries."""
	def __init__(self, pictogram_name, pictogram_file_path, parent = None):
		"""
		pictogram_name: short name; its upper-cased form becomes the tooltip.
		pictogram_file_path: image file shown as the button face.
		"""
		super(SXPictogramButton, self).__init__(parent)
		# print "MRK_DEBUG: pictogram_file_path = %s" % pictogram_file_path
		# print "MRK_DEBUG: os.path.exists(logo_file_path) %s" % os.path.exists(pictogram_file_path)
		# Width of pictogram image (buttons are square)
		pictogram_width = SXLookFeelConst.sxmenu_item_btn_width
		# Style of widget
		self.setFixedSize(pictogram_width, pictogram_width)
		# Default style: invisible border that becomes grey on focus/press;
		# corner radius is 1/6 of the edge length
		self.customButtonStyle = """
			SXPictogramButton {{background-color: rgba(0, 0, 0, 0); border: 2px solid rgba(0, 0, 0, 0); border-radius: {1}px; image: url("{0}");}}
			SXPictogramButton:focus {{background-color: rgba(0, 0, 0, 0); border: 2px solid grey; border-radius: {1}px; image: url("{0}");}}
			SXPictogramButton:pressed {{background-color: rgba(0, 0, 0, 0); border: 2px solid rgb(153, 153, 153); border-radius: {1}px; image: url("{0}");}}
			""".format(pictogram_file_path, pictogram_width / 6)
		# "Clicked" (selected) style: permanent light-grey border marks the active category
		self.customButtonStyleClicked = """
			SXPictogramButton:pressed {{background-color: rgba(0, 0, 0, 0); border: 2px solid rgb(153, 153, 153); border-radius: {1}px; image: url("{0}");}}
			SXPictogramButton {{background-color: rgba(0, 0, 0, 0); border: 2px solid rgb(220, 220, 220); border-radius: {1}px; image: url("{0}");}}
			""".format(pictogram_file_path, pictogram_width / 6)
		# Set style and add click event
		self.setStyleSheet(self.customButtonStyle)
		# Add tooltip
		self.setToolTip(pictogram_name.upper())
class SXMenuItemBtnAreaWidget(QWidget):
	"""Left-hand menu column of the main window: category pictogram buttons,
	a separator line, miscellaneous-function buttons, and the logo button."""
	def __init__(self, sxconst_set, sxcmd_category_list, sxinfo, parent = None):
		"""
		sxconst_set: SXconst_set; its button goes to the misc-function area.
		sxcmd_category_list: categories; "sxc_utilities" goes to the misc area,
		all others to the pipeline area.
		sxinfo: object whose .btn attribute receives the created logo button.
		"""
		super(SXMenuItemBtnAreaWidget, self).__init__(parent)
		# Create widgets for pipeline command category button area and miscellaneous function button area
		sxcmd_category_btn_subarea_widget = self.create_sxmenu_item_btn_subarea_widget()
		misc_func_btn_subarea_widget = self.create_sxmenu_item_btn_subarea_widget()
		for sxcmd_category in sxcmd_category_list:
			if sxcmd_category.name != "sxc_utilities":
				self.add_sxmenu_item_btn_widget(sxcmd_category, sxcmd_category_btn_subarea_widget)
			else: # assert(sxcmd_category.name == "sxc_utilities")
				self.add_sxmenu_item_btn_widget(sxcmd_category, misc_func_btn_subarea_widget)
		self.add_sxmenu_item_btn_widget(sxconst_set, misc_func_btn_subarea_widget)
		global_layout = QVBoxLayout()
		global_layout.setContentsMargins(0, 0, 0, 0)
		# Dark translucent backdrop behind the whole button column
		sxmenu_item_btn_area_widget = QWidget(self)
		sxmenu_item_btn_area_widget.setObjectName('SXMenuItemBtnAreaWidget')
		sxmenu_item_btn_area_widget.setStyleSheet('QWidget#SXMenuItemBtnAreaWidget {background-color: rgba(0, 0, 0, 153);}')
		sxmenu_item_btn_area_widget.setFixedWidth(SXLookFeelConst.sxmenu_btn_area_min_width)
		sxmenu_item_btn_area_layout = QVBoxLayout()
		# Add widget of pipeline command category button area to layout
		sxmenu_item_btn_area_layout.addWidget(sxcmd_category_btn_subarea_widget)
		# Create and Add separator label (1px translucent line between the two areas)
		layout_label = QHBoxLayout()
		line_label = QLabel(sxmenu_item_btn_area_widget)
		line_label.setFixedHeight(1)
		line_label.setFixedWidth(SXLookFeelConst.sxmenu_item_btn_width * 2)
		line_label.setStyleSheet('background-color: rgba(220, 220, 220, 100)')
		layout_label.addWidget(line_label)
		layout_label.setContentsMargins(0, 7, 0, 7)
		sxmenu_item_btn_area_layout.addLayout(layout_label)
		# Add widget of miscellaneous function button area to layout
		sxmenu_item_btn_area_layout.addWidget(misc_func_btn_subarea_widget)
		# Add stretch to make a space and keep sizes of the other widgets to be constant
		sxmenu_item_btn_area_layout.addStretch(1)
		# Add menu item button for application information
		sxmenu_item_btn_pictograph_file_path = '{0}sxgui_logo_sphire.png'.format(get_image_directory())
		sxmenu_item_btn = SXLogoButton(sxmenu_item_btn_pictograph_file_path)
		sxinfo.btn = sxmenu_item_btn
		sxmenu_item_btn_area_layout.addWidget(sxmenu_item_btn)
		# Set menu item button area layout to the widget
		sxmenu_item_btn_area_widget.setLayout(sxmenu_item_btn_area_layout)
		# self related settings
		global_layout.addWidget(sxmenu_item_btn_area_widget)
		self.setLayout(global_layout)
	def create_sxmenu_item_btn_subarea_widget(self):
		"""Return a new empty QWidget with a tight QGridLayout for menu buttons."""
		sxmenu_item_btn_subarea_widget = QWidget()
		grid_layout = QGridLayout()
		grid_layout.setSpacing(SXLookFeelConst.grid_distance)
		grid_layout.setContentsMargins(0, 0, 0, 0)
		sxmenu_item_btn_subarea_widget.setLayout(grid_layout)
		return sxmenu_item_btn_subarea_widget
	def add_sxmenu_item_btn_widget(self, sxmenu_item, sxmenu_item_btn_subarea_widget):
		"""Create a pictogram button for sxmenu_item, store it on the item's
		.btn attribute, and place it in the next free cell of the grid."""
		assert(isinstance(sxmenu_item, SXmenu_item) == True) # Assuming the sxmenu_item is an instance of class SXmenu_item
		sxmenu_item_btn_pictograph_file_path = "{0}sxgui_pictograph_{1}.png".format(get_image_directory(), sxmenu_item.name.replace("sxc_", ""))
		sxmenu_item.btn = SXPictogramButton(sxmenu_item.name.replace("sxc_", ""), sxmenu_item_btn_pictograph_file_path, self)
		cur_widget_counts = sxmenu_item_btn_subarea_widget.layout().count()
		# Two buttons per row: row = count // 2, column = count % 2
		sxmenu_item_btn_subarea_widget.layout().addWidget(sxmenu_item.btn, cur_widget_counts // 2, cur_widget_counts % 2)
# ========================================================================================
# Provides all necessary functionality;
# tabs only provide widgets and know how to lay them out
class SXCmdWidget(QWidget):
def __init__(self, sxconst_set, sxcmd, parent = None):
	"""Build the parameter widget for one sx command: a QTabWidget holding a
	"Main" and an "Advanced" SXCmdTab.

	sxconst_set: project constants set, shared with the tabs.
	sxcmd: the SXcmd instance this widget edits and executes.
	"""
	super(SXCmdWidget, self).__init__(parent)
	# class variables
	self.sxconst_set = sxconst_set
	self.sxcmd = sxcmd
	self.sxcmd_tab_main = None
	self.sxcmd_tab_advance = None
	self.child_application_list = [] # GUI child processes started from this widget
	# Per-command settings file inside the project settings directory
	self.gui_settings_file_path = "%s/gui_settings_%s.txt" % (self.sxcmd.get_category_dir_path(SXLookFeelConst.project_dir), self.sxcmd.get_mode_name_for("file_path"))
	# Set grid layout
	grid_layout = QGridLayout(self)
	# grid_layout.setMargin(SXLookFeelConst.grid_margin)
	# grid_layout.setSpacing(SXLookFeelConst.grid_spacing)
	self.setAutoFillBackground(True)
	palette = QPalette()
	palette.setBrush(QPalette.Background, QBrush(SXLookFeelConst.sxcmd_widget_bg_color))
	self.setPalette(palette)
	# Tabs must exist before being inserted into the tab widget
	self.sxcmd_tab_main = SXCmdTab("Main", self)
	self.sxcmd_tab_advance = SXCmdTab("Advanced", self)
	tab_widget = QTabWidget()
	tab_widget.insertTab(0, self.sxcmd_tab_main, self.sxcmd_tab_main.name)
	tab_widget.insertTab(1, self.sxcmd_tab_advance, self.sxcmd_tab_advance.name)
	tab_widget.setAutoFillBackground(True)
	# Custom tab look: centered tab bar, rounded grey gradient tabs
	tab_widget.setStyleSheet("""QTabWidget::pane {
		border-top: 2px solid #C2C7CB;
		position: absolute;
		top: -0.5em;
	}
	QTabWidget::tab-bar {
		alignment: center;
	}
	QTabBar::tab {
		background: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1,
		stop: 0 #E1E1E1, stop: 0.4 #DDDDDD,
		stop: 0.5 #D8D8D8, stop: 1.0 #D3D3D3);
		border: 2px solid #C4C4C3;
		border-bottom-color: #C2C7CB; /* same as the pane color */
		border-top-left-radius: 4px;
		border-top-right-radius: 4px;
		min-width: 8ex;
		padding: 2px;
	}
	QTabBar::tab:selected, QTabBar::tab:hover {
		background: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1,
		stop: 0 #fafafa, stop: 0.4 #f4f4f4,
		stop: 0.5 #e7e7e7, stop: 1.0 #fafafa);
	}
	QTabBar::tab:selected {
		border-color: #9B9B9B;
		border-bottom-color: #C2C7CB; /* same as pane color */
	}""")
	palette = tab_widget.palette()
	palette.setBrush(QPalette.Background, QBrush(SXLookFeelConst.sxcmd_widget_bg_color))
	tab_widget.setPalette(palette)
	grid_layout.addWidget(tab_widget, 0, 0)
def map_widgets_to_sxcmd_line(self):
	"""Assemble the sx command line string from the current widget states.

	Returns the command line string, or "" when a required token is unset or
	a token value is invalid (a warning dialog is shown in that case).
	"""
	# Add program name to command line
	sxcmd_line = "%s.py" % (self.sxcmd.name)
	if self.sxcmd.subname != "":
		sxcmd_line += " %s" % (self.sxcmd.subname)
	# Loop through all command tokens
	for sxcmd_token in self.sxcmd.token_list:
		# First, handle very special cases
		if sxcmd_token.type == "function":
			# "function" tokens carry two line-edit widgets:
			# [0] user function name, [1] external file path
			user_func_name_index = 0
			external_file_path_index = 1
			user_func_name = str(sxcmd_token.widget[user_func_name_index].text())
			external_file_path = str(sxcmd_token.widget[external_file_path_index].text())
			# This is not default value
			if external_file_path not in ["", sxcmd_token.default[external_file_path_index]]:
				# Case 1: User specified an external function different from default or empty string
				if os.path.splitext(external_file_path)[1] != ".py":
					# BUGFIX: message typo "Exteranl" corrected to "External"
					QMessageBox.warning(self, "Invalid parameter value", "External File Path (%s) should include the python script extension (.py)." % (external_file_path))
					return ""
				dir_path, file_basename = os.path.split(external_file_path)
				file_basename = file_basename.replace(".py", "")
				sxcmd_line += " %s%s=[%s,%s,%s]" % (sxcmd_token.key_prefix, sxcmd_token.key_base, dir_path, file_basename, user_func_name)
			elif user_func_name != sxcmd_token.default[user_func_name_index]:
				# Case 2: User specified an internal function different from default
				sxcmd_line += " %s%s=%s" % (sxcmd_token.key_prefix, sxcmd_token.key_base, user_func_name)
			# else: User left default value. Do nothing
		# Then, handle the other cases
		else:
			if sxcmd_token.type == "bool":
				# Emit the flag unless the checkbox equals the default AND the token is optional
				if not ((sxcmd_token.widget.checkState() == Qt.Checked) == sxcmd_token.default and sxcmd_token.is_required == False):
					sxcmd_line += " %s%s" % (sxcmd_token.key_prefix, sxcmd_token.key_base)
				# else: checkbox at default and optional; do not add this token
			else:
				if sxcmd_token.widget.text() == sxcmd_token.default:
					if sxcmd_token.is_required == True:
						# Required token still at its default: abort with a warning
						QMessageBox.warning(self, "Invalid parameter value", "Token (%s) of command (%s) is required. Please set the value for this." % (sxcmd_token.label, self.sxcmd.get_mode_name_for("human")))
						return ""
					# else: optional token left at default; do not add it to the command line
				else: # sxcmd_token.widget.text() != sxcmd_token.default
					# For now, using line edit box for the other type
					widget_text = str(sxcmd_token.widget.text())
					if sxcmd_token.type not in ["int", "float", "apix", "ctfwin", "box", "radius", "mass", "any_file_list", "any_image_list", "any_directory"]:
						# Always enclose the string value with single quotes (')
						widget_text = widget_text.strip("\'") # make sure the string is not enclosed by (')
						widget_text = widget_text.strip("\"") # make sure the string is not enclosed by (")
						widget_text = "\'%s\'" % (widget_text) # then, enclose the string value with single quotes (')
					if sxcmd_token.key_prefix == "":
						sxcmd_line += " %s" % (widget_text)
					elif sxcmd_token.key_prefix == "--":
						sxcmd_line += " %s%s=%s" % (sxcmd_token.key_prefix, sxcmd_token.key_base, widget_text)
					else:
						ERROR("Logical Error: Encountered unexpected prefix for token (%s) of command (%s). Consult with the developer." % (sxcmd_token.key_base, self.sxcmd.get_mode_name_for("human")), "%s in %s" % (__name__, os.path.basename(__file__)))
	return sxcmd_line
def generate_cmd_line(self):
	"""Return the full executable command line for the current GUI state.

	Wraps map_widgets_to_sxcmd_line() output according to the execution mode:
	(1) queue submission via a qsub template, (2) direct MPI run, or
	(3) plain direct run.  Returns "" on any validation failure.
	"""
	# Generate SX command line
	sxcmd_line = self.map_widgets_to_sxcmd_line()
	if sxcmd_line:
		# SX command line is not empty
		# If mpi is not supported set number of MPI processer (np) to 1
		np = 1
		if self.sxcmd.mpi_support:
			# mpi is supported
			np = int(str(self.sxcmd_tab_main.mpi_nproc_edit.text()))
			#
			# DESIGN_NOTE: 2016/03/17 Toshio Moriya
			# The MPI policy below has changed!!! An example of this exception is sxcter.py.
			# Don't add --MPI flag if np == 1
			#
			# DESIGN_NOTE: 2015/10/27 Toshio Moriya
			# Since we now assume sx*.py exists in only MPI version, always add --MPI flag if necessary
			# This is not elegant but can be removed when --MPI flag is removed from all sx*.py scripts
			#
			if self.sxcmd.mpi_add_flag and np > 1:
				sxcmd_line += " --MPI"
			# DESIGN_NOTE: 2016/02/11 Toshio Moriya
			# Ideally, the following exceptional cases should not handled in here
			# because it will remove the generality from the software design
			# These commands require np to be a multiple of a specific token value:
			required_key_base = None
			if self.sxcmd.name == "sxisac":
				required_key_base = "indep_run"
			elif self.sxcmd.name == "sxviper":
				required_key_base = "nruns"
			elif self.sxcmd.name == "sxrviper":
				required_key_base = "n_shc_runs"
			# else: # Do nothing
			if required_key_base != None:
				required_divisor = int(str(self.sxcmd.token_dict[required_key_base].widget.text()))
				required_label = self.sxcmd.token_dict[required_key_base].label
				if required_divisor == 0:
					QMessageBox.warning(self, "Invalid parameter value", "\"%s\" must be larger than 0. Please check the setting" % (required_label))
					return ""
				valid_np = np
				if valid_np % required_divisor != 0:
					# Suggest the closest valid process count in the warning
					if valid_np < required_divisor:
						valid_np = required_divisor
					else:
						valid_np = valid_np - (valid_np % required_divisor)
					QMessageBox.warning(self, "Invalid parameter value", "The number of \"MPI processes\" (%d) is invalid. It MUST BE multiplicity of \"%s\" (%d). Please check the setting. A close valid number is %d." % (np, required_label, required_divisor,valid_np))
					return ""
		# else: assert(np == 1) # because the "MPI Processes" is disabled for sx*.py process which does not support mpi
		# Generate command line according to the case
		cmd_line = ""
		if self.sxcmd_tab_main.qsub_enable_checkbox.checkState() == Qt.Checked:
			# Case 1: queue submission is enabled (MPI can be supported or unsupported)
			# Create script for queue submission from a given template
			if os.path.exists(self.sxcmd_tab_main.qsub_script_edit.text()) != True:
				QMessageBox.warning(self, "Invalid parameter value", "Invalid file path for qsub script template (%s)." % (self.sxcmd_tab_main.qsub_script_edit.text()))
				return ""
			file_template = open(self.sxcmd_tab_main.qsub_script_edit.text(),"r")
			# Extract command line from qsub script template
			for line in file_template:
				if line.find("XXX_SXCMD_LINE_XXX") != -1:
					if np > 1:
						cmd_line = line
					else:
						# Single process: strip the MPI wrapper from the template line
						cmd_line = "XXX_SXCMD_LINE_XXX"
					cmd_line = cmd_line.replace("XXX_SXCMD_LINE_XXX", sxcmd_line)
					if cmd_line.find("XXX_SXMPI_NPROC_XXX") != -1:
						cmd_line = cmd_line.replace("XXX_SXMPI_NPROC_XXX", str(np))
					if cmd_line.find("XXX_SXMPI_JOB_NAME_XXX") != -1:
						cmd_line = cmd_line.replace("XXX_SXMPI_JOB_NAME_XXX", str(self.sxcmd_tab_main.qsub_job_name_edit.text()))
			file_template.close()
		elif self.sxcmd.mpi_support:
			# Case 2: queue submission is disabled, but MPI is supported
			# Sanity guard: unreachable given the branch above; kept as a developer check
			if self.sxcmd_tab_main.qsub_enable_checkbox.checkState() == Qt.Checked: ERROR("Logical Error: Encountered unexpected condition for sxcmd_tab_main.qsub_enable_checkbox.checkState. Consult with the developer.", "%s in %s" % (__name__, os.path.basename(__file__)))
			# Add MPI execution to command line
			cmd_line = str(self.sxcmd_tab_main.mpi_cmd_line_edit.text())
			# If empty string is entered, use a default template
			if cmd_line == "":
				if np > 1:
					cmd_line = "mpirun -np XXX_SXMPI_NPROC_XXX XXX_SXCMD_LINE_XXX"
				else:
					cmd_line = "XXX_SXCMD_LINE_XXX"
			if cmd_line.find("XXX_SXMPI_NPROC_XXX") != -1:
				cmd_line = cmd_line.replace("XXX_SXMPI_NPROC_XXX", str(np))
			if cmd_line.find("XXX_SXCMD_LINE_XXX") != -1:
				cmd_line = cmd_line.replace("XXX_SXCMD_LINE_XXX", sxcmd_line)
		else:
			# Case 3: queue submission is disabled, and MPI is not supported
			# Sanity guard: unreachable given the branch above; kept as a developer check
			if self.sxcmd_tab_main.qsub_enable_checkbox.checkState() == Qt.Checked: ERROR("Logical Error: Encountered unexpected condition for sxcmd_tab_main.qsub_enable_checkbox.checkState. Consult with the developer.", "%s in %s" % (__name__, os.path.basename(__file__)))
			# Use sx command as it is
			cmd_line = sxcmd_line
	else:
		# SX command line is empty because an error happened in map_widgets_to_sxcmd_line
		cmd_line = ""
	return cmd_line
def execute_cmd_line(self):
	"""Validate outputs, launch the generated command (directly or via qsub),
	and save the current GUI settings.

	The triggering button is disabled for 5 seconds to prevent double-launch.
	"""
	# Disable the run command button
	execute_btn = self.sender()
	execute_btn.setEnabled(False)
	QtCore.QTimer.singleShot(5000, lambda: execute_btn.setEnabled(True))
	# Generate command line
	cmd_line = self.generate_cmd_line()
	if cmd_line:
		# Command line is not empty
		# First, check existence of outputs
		for sxcmd_token in self.sxcmd.token_list:
			if sxcmd_token.type == "output" or sxcmd_token.type == "output_continue":
				if os.path.exists(sxcmd_token.widget.text()) or db_check_dict(str(sxcmd_token.widget.text())):
					# DESIGN_NOTE: 2015/11/24 Toshio Moriya
					# This special case needs to be handled with more general method...
					# These commands support restarting into an existing output directory
					if sxcmd_token.type == "output_continue" or self.sxcmd.name in ["sxisac", "sxviper", "sxrviper", "sxsort3d"]:
						reply = QMessageBox.question(self, "Output Directory/File", "Output Directory/File (%s) already exists. Do you really want to run the program with continue mode?" % (sxcmd_token.widget.text()), QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
						if reply == QMessageBox.No:
							return
						# else: # Do nothing
					else:
						assert(sxcmd_token.type == "output")
						QMessageBox.warning(self, "Output Directory/File", "Output Directory/File (%s) already exists. Please change the name and try it again. Aborting execution ..." % (sxcmd_token.widget.text()))
						return
		# If mpi is not supported set number of MPI processer (np) to 1
		np = 1
		if self.sxcmd.mpi_support:
			np = int(str(self.sxcmd_tab_main.mpi_nproc_edit.text()))
		if self.sxcmd_tab_main.qsub_enable_checkbox.checkState() == Qt.Checked:
			# Case 1: queue submission is enabled (MPI can be supported or unsupported)
			# Create script for queue submission from a given template
			template_file_path = self.sxcmd_tab_main.qsub_script_edit.text()
			if os.path.exists(template_file_path) == False:
				QMessageBox.warning(self, "Invalid parameter value", "Invalid file path for qsub script template (%s). Aborting execution ..." % (template_file_path))
				return
			file_template = open(self.sxcmd_tab_main.qsub_script_edit.text(),"r")
			file_name_qsub_script = "qsub_" + str(self.sxcmd_tab_main.qsub_job_name_edit.text()) + ".sh"
			file_qsub_script = open(file_name_qsub_script,"w")
			# Substitute placeholders while copying the template into the job script
			for line_io in file_template:
				if line_io.find("XXX_SXCMD_LINE_XXX") != -1:
					line_io = cmd_line
				else:
					if line_io.find("XXX_SXMPI_NPROC_XXX") != -1:
						line_io = line_io.replace("XXX_SXMPI_NPROC_XXX", str(np))
					if line_io.find("XXX_SXMPI_JOB_NAME_XXX") != -1:
						line_io = line_io.replace("XXX_SXMPI_JOB_NAME_XXX", str(self.sxcmd_tab_main.qsub_job_name_edit.text()))
				file_qsub_script.write(line_io)
			file_template.close()
			file_qsub_script.close()
			# Generate command line for queue submission
			cmd_line_in_script = cmd_line
			cmd_line = str(self.sxcmd_tab_main.qsub_cmd_edit.text()) + " " + file_name_qsub_script
			print "Wrote the following command line in the queue submission script: "
			print cmd_line_in_script
			print "Submitted a job by the following command: "
			print cmd_line
		else:
			# Case 2: queue submission is disabled (MPI can be supported or unsupported)
			# Sanity guard: unreachable given the branch above; kept as a developer check
			if self.sxcmd_tab_main.qsub_enable_checkbox.checkState() == Qt.Checked: ERROR("Logical Error: Encountered unexpected condition for sxcmd_tab_main.qsub_enable_checkbox.checkState. Consult with the developer.", "%s in %s" % (__name__, os.path.basename(__file__)))
			print "Executed the following command: "
			print cmd_line
		# Execute the generated command line
		process = subprocess.Popen(cmd_line, shell=True)
		self.emit(SIGNAL("process_started"), process.pid)
		if self.sxcmd.is_submittable == False:
			assert(self.sxcmd.mpi_support == False)
			# Register because this is a GUI application (tracked for shutdown)
			self.child_application_list.append(process)
		# Save the current state of GUI settings
		if os.path.exists(self.sxcmd.get_category_dir_path(SXLookFeelConst.project_dir)) == False:
			os.mkdir(self.sxcmd.get_category_dir_path(SXLookFeelConst.project_dir))
		self.write_params(self.gui_settings_file_path)
	# else: SX command line is empty because an error happened in generate_cmd_line. Do nothing
def print_cmd_line(self):
# Generate command line
cmd_line = self.generate_cmd_line()
if cmd_line:
message_line = "Generated the following command line:"
print message_line
print cmd_line
QtGui.QMessageBox.information(self, "Information","%s \n\n%s" % (message_line, cmd_line))
# Save the current state of GUI settings
if os.path.exists(self.sxcmd.get_category_dir_path(SXLookFeelConst.project_dir)) == False:
os.mkdir(self.sxcmd.get_category_dir_path(SXLookFeelConst.project_dir))
self.write_params(self.gui_settings_file_path)
# else: Do nothing
def write_params(self, file_path_out):
	"""Save the current GUI state to *file_path_out* in the settings format
	read back by read_params().

	Writes a header line (used for consistency checking on load), then each
	command token's value grouped by tab ("main" first, then "advanced"),
	and finally the MPI and queue-submission settings.
	"""
	# ROBUSTNESS: context manager closes the file even if a widget access raises
	with open(file_path_out, "w") as file_out:
		# Write script name for consistency check upon loading
		file_out.write("@@@@@ %s gui settings - " % (self.sxcmd.get_mode_name_for("human")))
		file_out.write(EMANVERSION + " (GITHUB: " + DATESTAMP +")" )
		file_out.write(" @@@@@ \n")
		# Define list of (tab) groups
		group_main = "main"
		group_advanced = "advanced"
		# Loop through all groups. First write out values of widgets in main tab, then ones in advanced
		for group in [group_main, group_advanced]:
			# Loop through all command tokens
			for cmd_token in self.sxcmd.token_list:
				if cmd_token.group == group:
					# First, handle very special cases
					if cmd_token.type == "function":
						# This type has two line edit boxes as a list of widget
						n_widgets = 2
						for widget_index in xrange(n_widgets):
							val_str = str(cmd_token.widget[widget_index].text())
							file_out.write("<%s> %s (default %s) == %s \n" % (cmd_token.key_base, cmd_token.label[widget_index], cmd_token.default[widget_index], val_str))
					# Then, handle the other cases
					else:
						val_str = ""
						if cmd_token.type == "bool":
							# Checkbox state is serialised as YES/NO
							if cmd_token.widget.checkState() == Qt.Checked:
								val_str = "YES"
							else:
								val_str = "NO"
						else:
							# The other type has only one line edit box
							val_str = str(cmd_token.widget.text())
						if cmd_token.is_required == False:
							file_out.write("<%s> %s (default %s) == %s \n" % (cmd_token.key_base, cmd_token.label, cmd_token.default, val_str))
						else:
							file_out.write("<%s> %s (default required %s) == %s \n" % (cmd_token.key_base, cmd_token.label, cmd_token.type, val_str))
				# else: token belongs to the other group; nothing to do here
		# At the end of parameter file...
		# Write MPI parameters
		file_out.write("%s == %s \n" % ("MPI processors", str(self.sxcmd_tab_main.mpi_nproc_edit.text())))
		file_out.write("%s == %s \n" % ("MPI Command Line Template", str(self.sxcmd_tab_main.mpi_cmd_line_edit.text())))
		# Write Qsub parameters
		if self.sxcmd_tab_main.qsub_enable_checkbox.checkState() == Qt.Checked:
			val_str = "YES"
		else:
			val_str = "NO"
		file_out.write("%s == %s \n" % ("Submit Job to Queue", val_str))
		file_out.write("%s == %s \n" % ("Job Name", str(self.sxcmd_tab_main.qsub_job_name_edit.text())))
		file_out.write("%s == %s \n" % ("Submission Command", str(self.sxcmd_tab_main.qsub_cmd_edit.text())))
		file_out.write("%s == %s \n" % ("Submission Script Template", str(self.sxcmd_tab_main.qsub_script_edit.text())))
def read_params(self, file_path_in):
	"""Load GUI settings previously saved by write_params() from *file_path_in*.

	The header line must match this command's mode name; otherwise a warning
	dialog is shown and nothing is loaded.  Locked tokens are skipped.
	"""
	# ROBUSTNESS: context manager closes the file even if parsing raises
	with open(file_path_in, "r") as file_in:
		# Check if this parameter file is for this sx script
		line_in = file_in.readline()
		if line_in.find("@@@@@ %s gui settings" % (self.sxcmd.get_mode_name_for("human"))) != -1:
			# "function" tokens occupy two consecutive lines; this counter tracks which one
			n_function_type_lines = 2
			function_type_line_counter = 0
			# loop through the rest of lines
			for line_in in file_in:
				# Extract label (which should be left of "=="). Also strip the ending spaces
				label_in = line_in.split("==")[0].strip()
				# Extract value (which should be right of "=="). Also strip all spaces
				val_str_in = line_in.split("==")[1].strip()
				if label_in == "MPI processors":
					self.sxcmd_tab_main.mpi_nproc_edit.setText(val_str_in)
				elif label_in == "MPI Command Line Template":
					self.sxcmd_tab_main.mpi_cmd_line_edit.setText(val_str_in)
				elif label_in == "Submit Job to Queue":
					if val_str_in == "YES":
						self.sxcmd_tab_main.qsub_enable_checkbox.setChecked(Qt.Checked)
					else: # assert(val_str_in == "NO")
						self.sxcmd_tab_main.qsub_enable_checkbox.setChecked(Qt.Unchecked)
					# self.sxcmd_tab_main.set_qsub_enable_state() # Somehow this place does not paint the text boxes upon application startup
				elif label_in == "Job Name":
					self.sxcmd_tab_main.qsub_job_name_edit.setText(val_str_in)
				elif label_in == "Submission Command":
					self.sxcmd_tab_main.qsub_cmd_edit.setText(val_str_in)
				elif label_in == "Submission Script Template":
					self.sxcmd_tab_main.qsub_script_edit.setText(val_str_in)
				else:
					# Extract key_base of this command token
					target_operator = "<"
					item_tail = label_in.find(target_operator)
					if item_tail != 0:
						QMessageBox.warning(self, "Invalid Parameter File Format", "Command token entry should start from \"%s\" for key base name in line (%s) of file (%s). The format of this file might be corrupted. Please save the parameter file again." % (target_operator, line_in, file_path_in))
					label_in = label_in[item_tail + len(target_operator):].strip() # Get the rest of line
					target_operator = ">"
					item_tail = label_in.find(target_operator)
					if item_tail == -1:
						QMessageBox.warning(self, "Invalid Parameter File Format", "Command token entry should have \"%s\" closing key base name in line (%s) of file (%s). The format of this file might be corrupted. Please save the parameter file again." % (target_operator, line_in, file_path_in))
					key_base = label_in[0:item_tail]
					# Get corresponding cmd_token (membership test directly on the dict)
					if key_base not in self.sxcmd.token_dict:
						QMessageBox.warning(self, "Invalid Parameter File Format", "Invalid base name of command token \"%s\" is found in line (%s) of file (%s). This parameter file might be incompatible with the current version. Please save the parameter file again." % (key_base, line_in, file_path_in))
					cmd_token = self.sxcmd.token_dict[key_base]
					if not cmd_token.is_locked:
						# First, handle very special cases
						if cmd_token.type == "function":
							cmd_token.widget[function_type_line_counter].setText(val_str_in)
							function_type_line_counter += 1
							function_type_line_counter %= n_function_type_lines # function have two line edit boxes
						# Then, handle the other cases
						else:
							if cmd_token.type == "bool":
								# Restore the checkbox state from YES/NO
								if val_str_in == "YES":
									cmd_token.widget.setChecked(Qt.Checked)
								else: # val_str_in == "NO"
									cmd_token.widget.setChecked(Qt.Unchecked)
							else:
								# For now, use line edit box for the other type
								cmd_token.widget.setText(val_str_in)
		else:
			QMessageBox.warning(self, "Fail to load parameters", "The specified file is not parameter file for %s." % self.sxcmd.get_mode_name_for("human"))
def save_params(self):
    """Ask the user for an output file via a save dialog and write the
    current GUI parameter settings to it.

    A cancelled dialog yields an empty path, in which case nothing is written.
    """
    chosen_path = str(QFileDialog.getSaveFileName(self, "Save Parameters", SXLookFeelConst.file_dialog_dir, options = QFileDialog.DontUseNativeDialog))
    # An empty string means the user dismissed the dialog without choosing.
    if chosen_path:
        self.write_params(chosen_path)
def load_params(self):
    """Ask the user for a parameter file via an open dialog, load it into
    the GUI, and refresh the qsub-related widget enable state.

    A cancelled dialog yields an empty path, in which case nothing is loaded.
    """
    chosen_path = str(QFileDialog.getOpenFileName(self, "Load parameters", SXLookFeelConst.file_dialog_dir, options = QFileDialog.DontUseNativeDialog))
    # Only act on a real selection; empty string means the dialog was cancelled.
    if chosen_path:
        self.read_params(chosen_path)
        self.sxcmd_tab_main.set_qsub_enable_state()
def select_file(self, target_widget, file_format = ""):
# Open a file dialog appropriate for file_format and write the chosen
# path (or space-separated paths) into target_widget via setText().
#
# target_widget: widget providing setText() (presumably a QLineEdit; the
#                callers are not visible here -- confirm).
# file_format:   dialog selector. Known values handled specially below:
#                "bdb", "py", "pdb", "mrc", "exe", "any_file_list",
#                "any_image_list". Any other non-empty value builds a
#                generic "<FMT> files (*.<fmt>)" filter; "" shows an
#                all-files dialog.
#
# Most selections are converted to a project-relative path with
# SXLookFeelConst.format_path(); "py" deliberately keeps the full path,
# and "bdb" additionally translates the path into bdb:... notation.
file_path = ""
if file_format == "bdb":
file_path = str(QFileDialog.getOpenFileName(self, "Select BDB File", SXLookFeelConst.file_dialog_dir, "BDB files (*.bdb)", options = QFileDialog.DontUseNativeDialog))
# Use relative path.
if file_path:
file_path = SXLookFeelConst.format_path(file_path)
# Convert the file-system path into EMAN2-style "bdb:..." notation.
file_path = translate_to_bdb_path(file_path)
elif file_format == "py":
file_path = str(QFileDialog.getOpenFileName(self, "Select Python File", SXLookFeelConst.file_dialog_dir, "PY files (*.py)", options = QFileDialog.DontUseNativeDialog))
# Use full path
elif file_format == "pdb":
file_path = str(QFileDialog.getOpenFileName(self, "Select PDB File", SXLookFeelConst.file_dialog_dir, "PDB files (*.pdb *.pdb*)", options = QFileDialog.DontUseNativeDialog))
# Use relative path.
if file_path:
file_path = SXLookFeelConst.format_path(file_path)
elif file_format == "mrc":
file_path = str(QFileDialog.getOpenFileName(self, "Select MRC File", SXLookFeelConst.file_dialog_dir, "MRC files (*.mrc *.mrcs)", options = QFileDialog.DontUseNativeDialog))
# Use relative path.
if file_path:
file_path = SXLookFeelConst.format_path(file_path)
elif file_format == "exe":
file_path = str(QFileDialog.getOpenFileName(self, "Select EXE File", SXLookFeelConst.file_dialog_dir, "EXE files (*.exe );; All files (*)", options = QFileDialog.DontUseNativeDialog))
# Use relative path.
if file_path:
file_path = SXLookFeelConst.format_path(file_path)
elif file_format == "any_file_list" or file_format == "any_image_list":
# Multi-selection dialog: join all chosen paths into one string,
# each converted to a relative path and followed by a space
# (note: this leaves a trailing space in the result).
file_path_list = QFileDialog.getOpenFileNames(self, "Select Files", SXLookFeelConst.file_dialog_dir, "All files (*)", options = QFileDialog.DontUseNativeDialog)
# Use relative path.
for a_file_path in file_path_list:
file_path += SXLookFeelConst.format_path(str(a_file_path)) + " "
else:
# Generic fallback: build a filter from the format name, or show
# an all-files dialog when no format was given.
if file_format:
file_path = str(QFileDialog.getOpenFileName(self, "Select %s File" % (file_format.upper()), SXLookFeelConst.file_dialog_dir, "%s files (*.%s)" % (file_format.upper(), file_format), options = QFileDialog.DontUseNativeDialog))
else:
file_path = str(QFileDialog.getOpenFileName(self, "Select File", SXLookFeelConst.file_dialog_dir, "All files (*)", options = QFileDialog.DontUseNativeDialog))
# Use relative path.
if file_path:
file_path = SXLookFeelConst.format_path(file_path)
# Only update the widget when the user actually selected something;
# a cancelled dialog leaves target_widget untouched.
if file_path != "":
target_widget.setText(file_path)
def select_dir(self, target_widget):
    """Open a directory chooser and write the selected directory into
    target_widget as a project-relative path.

    A cancelled dialog leaves target_widget untouched.
    """
    chosen_dir = str(QFileDialog.getExistingDirectory(self, "Select Directory", SXLookFeelConst.file_dialog_dir, options = QFileDialog.ShowDirsOnly | QFileDialog.DontResolveSymlinks | QFileDialog.DontUseNativeDialog))
    if not chosen_dir:
        return
    # Store the path relative to the project directory.
    target_widget.setText(SXLookFeelConst.format_path(chosen_dir))
def quit_all_child_applications(self):
    """Forcefully stop every child application this widget has launched.

    Uses kill() rather than terminate(): per the original author's note,
    terminate() ends up printing a "Program interrupted" message for each
    child, which is undesirable.
    """
    for running_child in self.child_application_list:
        running_child.kill()
"""
# def show_output_info(self):
# QMessageBox.information(self, "sx* output","outdir is the name of the output folder specified by the user. If it does not exist, the directory will be created. If it does exist, the program will crash and an error message will come up. Please change the name of directory and restart the program.")
"""
# ========================================================================================
class SXCmdTab(QWidget):
def __init__(self, name, parent=None):
super(SXCmdTab, self).__init__(parent)
# ><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><
# class variables
self.name = name
self.sxcmdwidget = parent
# ><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><
# local constants
required_cmd_token_restore_tooltip = "Please enter the value manually"
locked_cmd_token_restore_tooltip = "This value is locked"
const_cmd_token_restore_tooltip = "Retrieve the registed constant value for this parameter"
default_cmd_token_restore_tooltip = "Retrieve this default value"
# Setting for layout
grid_row_origin = 0; grid_col_origin = 0
title_row_span = 1; title_col_span = 2
short_info_row_span = 1; short_info_col_span = 5
func_btn_row_span = 1; func_btn_col_span = 2
token_label_row_span = 1; token_label_col_span = 4
token_widget_row_span = 1; token_widget_col_span = 1
cmd_frame_row_span = 32; cmd_frame_col_span = 7
title_label_min_width = 180 # title_label_min_width = 150
title_label_min_height = 40 #title_label_min_height = 80
short_info_min_width = 260 # short_info_min_width = 360
short_info_min_height = 40 # short_info_min_height = 80
func_btn_min_width = 150
btn_min_width = 300
token_label_min_width = 300 # token_label_min_width = 360
token_widget_min_width = 120
mpi_label_min_width = 100
# Setup global layout
global_layout = QVBoxLayout(self)
global_layout.setContentsMargins(0,0,0,0)
global_layout.setSpacing(0)
# Setup scroll area and its widget
scroll_area = QScrollArea()
# scroll_area.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
# scroll_area.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOn) # MRK_DEBUG: Useful during designing layout
scroll_area.setWidgetResizable(True)
scroll_area_widget = QWidget(scroll_area)
# Setup scroll widget and its background color
scroll_area.setStyleSheet("QScrollArea {background-color:transparent;}");
### scroll_area_widget.setStyleSheet("background-color:transparent;");
scroll_area_widget.setAutoFillBackground(True)
scroll_area.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOn)
palette = QPalette()
palette.setBrush(QPalette.Background, QBrush(SXLookFeelConst.sxcmd_tab_bg_color))
scroll_area_widget.setPalette(palette)
# Register the widget to scroll area
scroll_area.setWidget(scroll_area_widget)
# Register the scroll area to the global layout
global_layout.addWidget(scroll_area)
# Setup other layouts
scroll_layout = QVBoxLayout(scroll_area_widget)
scroll_layout.setContentsMargins(0,0,0,0)
title_hbox = QHBoxLayout()
title_layout = QGridLayout()
title_layout.setMargin(SXLookFeelConst.grid_margin)
title_layout.setSpacing(SXLookFeelConst.grid_spacing)
# title_layout.setColumnMinimumWidth(grid_col_origin + token_label_col_span, token_widget_min_width)
# title_layout.setColumnMinimumWidth(grid_col_origin + token_label_col_span + token_widget_col_span, token_widget_min_width)
# title_layout.setColumnMinimumWidth(grid_col_origin + token_label_col_span + token_widget_col_span * 2, token_widget_min_width)
# title_layout.setColumnMinimumWidth(grid_col_origin + token_label_col_span + token_widget_col_span * 3, token_widget_min_width)
grid_layout = QGridLayout()
grid_layout.setMargin(SXLookFeelConst.grid_margin)
grid_layout.setSpacing(SXLookFeelConst.grid_spacing)
grid_layout.setColumnMinimumWidth(grid_col_origin + token_label_col_span, token_widget_min_width)
grid_layout.setColumnMinimumWidth(grid_col_origin + token_label_col_span + token_widget_col_span, token_widget_min_width)
grid_layout.setColumnMinimumWidth(grid_col_origin + token_label_col_span + token_widget_col_span * 2, token_widget_min_width)
grid_layout.setColumnMinimumWidth(grid_col_origin + token_label_col_span + token_widget_col_span * 3, token_widget_min_width)
submit_layout = QGridLayout()
submit_layout.setMargin(SXLookFeelConst.grid_margin)
submit_layout.setSpacing(SXLookFeelConst.grid_spacing)
submit_layout.setColumnMinimumWidth(grid_col_origin + token_label_col_span, token_widget_min_width)
submit_layout.setColumnMinimumWidth(grid_col_origin + token_label_col_span + token_widget_col_span, token_widget_min_width)
submit_layout.setColumnMinimumWidth(grid_col_origin + token_label_col_span + token_widget_col_span * 2, token_widget_min_width)
submit_layout.setColumnMinimumWidth(grid_col_origin + token_label_col_span + token_widget_col_span * 3, token_widget_min_width)
title_hbox.addLayout(title_layout)
# title_hbox.addStretch(1)
title_layout.setColumnStretch(grid_row_origin + token_label_col_span, title_layout.columnStretch(grid_row_origin+token_label_col_span) + 1)
scroll_layout.addLayout(title_hbox)
scroll_layout.addLayout(grid_layout)
scroll_layout.addLayout(submit_layout)
scroll_layout.addStretch(1)
# # Give the columns of token label a higher priority to stretch relative to the others
# for col_span in xrange(token_label_col_span):
# grid_layout.setColumnStretch(grid_row_origin + col_span, grid_layout.columnStretch(grid_row_origin+col_span) + 1)
# Define the tab frame within the tab layout
# tab_frame = QFrame()
# grid_layout.addWidget(tab_frame, grid_row_origin, grid_col_origin, cmd_frame_row_span, cmd_frame_col_span)
# Start add command token widgets to the grid layout
grid_row = grid_row_origin
tab_group = self.name.lower()
if tab_group == "main":
# Set a label and its position in this tab
temp_label = QLabel("%s" % (self.sxcmdwidget.sxcmd.get_mode_name_for("human")))
temp_label.setMinimumWidth(title_label_min_width)
temp_label.setMinimumHeight(title_label_min_height)
# temp_label.setFixedWidth(title_label_min_width)
# temp_label.setFixedHeight(title_label_min_height)
title_layout.addWidget(temp_label, grid_row, grid_col_origin, title_row_span, title_col_span)
#
# NOTE: 2015/11/17 Toshio Moriya
# Necessary to separate "%s" from the information for avoiding to invoke the tag interpretations of string
# e.g. < becomes the escape character
#
temp_label = QLabel("%s" % (self.sxcmdwidget.sxcmd.short_info))
temp_label.setWordWrap(True)
temp_label.setMinimumWidth(short_info_min_width)
temp_label.setMinimumHeight(short_info_min_height)
# temp_label.setFixedHeight(short_info_min_height)
title_layout.addWidget(temp_label, grid_row, grid_col_origin + title_col_span, short_info_row_span, short_info_col_span)
grid_row += short_info_row_span
elif tab_group == "advanced":
# Set a label and its position in this tab
temp_label = QLabel("%s" % (self.sxcmdwidget.sxcmd.get_mode_name_for("human")))
temp_label.setMinimumWidth(title_label_min_width)
temp_label.setMinimumHeight(title_label_min_height)
# temp_label.setFixedWidth(title_label_min_width)
# temp_label.setFixedHeight(title_label_min_height)
title_layout.addWidget(temp_label, grid_row, grid_col_origin, title_row_span, title_col_span)
temp_label = QLabel("Set advanced parameters", self)
temp_label.setWordWrap(True)
temp_label.setMinimumWidth(short_info_min_width)
temp_label.setMinimumHeight(short_info_min_height)
# temp_label.setFixedHeight(short_info_min_height)
title_layout.addWidget(temp_label, grid_row, grid_col_origin + title_col_span, short_info_row_span, short_info_col_span)
# Add space
grid_row += 2
# Add widget for editing command args and options
for cmd_token in self.sxcmdwidget.sxcmd.token_list:
if cmd_token.group == tab_group:
# First, handle very special cases
if cmd_token.type == "function":
n_widgets = 2 # function type has two line edit boxes
cmd_token_widget = [None] * n_widgets
cmd_token_restore_widget = [None] * n_widgets
# Define custom style for restore widgets
custom_style = "QPushButton {color:gray; }"
# Create widgets for user function name
widget_index = 0
temp_label = QLabel(cmd_token.label[widget_index])
temp_label.setMinimumWidth(token_label_min_width)
grid_layout.addWidget(temp_label, grid_row, grid_col_origin, token_label_row_span, token_label_col_span)
assert(cmd_token.is_required == False)
cmd_token_restore_widget[widget_index] = QPushButton("%s" % cmd_token.restore[widget_index])
cmd_token_restore_widget[widget_index].setStyleSheet(custom_style)
cmd_token_restore_widget[widget_index].setToolTip(''+default_cmd_token_restore_tooltip+'')
grid_layout.addWidget(cmd_token_restore_widget[widget_index], grid_row, grid_col_origin + token_label_col_span, token_widget_row_span, token_widget_col_span)
# cmd_token_widget[widget_index] = QLineEdit(self)
cmd_token_widget[widget_index] = QLineEdit()
cmd_token_widget[widget_index].setText(cmd_token.restore[widget_index])
cmd_token_widget[widget_index].setToolTip(''+cmd_token.help[widget_index]+'')
grid_layout.addWidget(cmd_token_widget[widget_index], grid_row, grid_col_origin + token_label_col_span + token_widget_col_span, token_widget_row_span, token_widget_col_span)
self.connect(cmd_token_restore_widget[widget_index], SIGNAL("clicked()"), partial(self.handle_restore_widget_event, cmd_token, widget_index))
grid_row += 1
# Create widgets for external file path containing above user function
widget_index = 1
temp_label = QLabel(cmd_token.label[widget_index])
grid_layout.addWidget(temp_label, grid_row, grid_col_origin, token_label_row_span, token_label_col_span)
assert(cmd_token.is_required == False)
cmd_token_restore_widget[widget_index] = QPushButton("%s" % cmd_token.restore[widget_index])
cmd_token_restore_widget[widget_index].setStyleSheet(custom_style)
cmd_token_restore_widget[widget_index].setToolTip(''+default_cmd_token_restore_tooltip+'')
grid_layout.addWidget(cmd_token_restore_widget[widget_index], grid_row, grid_col_origin + token_label_col_span, token_widget_row_span, token_widget_col_span)
cmd_token_widget[widget_index] = QLineEdit()
cmd_token_widget[widget_index].setText(cmd_token.restore[widget_index]) # Because default user functions is internal
cmd_token_widget[widget_index].setToolTip(''+cmd_token.help[widget_index]+'')
grid_layout.addWidget(cmd_token_widget[widget_index], grid_row, grid_col_origin + token_label_col_span + token_widget_col_span, token_widget_row_span, token_widget_col_span)
self.connect(cmd_token_restore_widget[widget_index], SIGNAL("clicked()"), partial(self.handle_restore_widget_event, cmd_token, widget_index))
file_format = "py"
temp_btn = QPushButton("Select Script")
temp_btn.setToolTip(''+"Display open file dailog to select .%s python script file" % file_format)
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 2, token_widget_row_span, token_widget_col_span)
self.connect(temp_btn, SIGNAL("clicked()"), partial(self.sxcmdwidget.select_file, cmd_token_widget[widget_index], file_format))
grid_row += 1
# temp_label = QLabel(cmd_token.help[widget_index])
# grid_layout.addWidget(temp_label, grid_row, grid_col_origin, token_label_row_span, token_label_col_span)
#
# grid_row += 1
# Then, handle the other cases
else:
# Create label widget
temp_label = QLabel(cmd_token.label)
temp_label.setMinimumWidth(token_label_min_width)
grid_layout.addWidget(temp_label, grid_row, grid_col_origin, token_label_row_span, token_label_col_span)
# Create widget and associate it to this cmd_token
cmd_token_widget = None
cmd_token_restore_widget = None
cmd_token_restore_tooltip = default_cmd_token_restore_tooltip
if cmd_token.type == "bool":
btn_name = "NO"
is_btn_enable = True
custom_style = "QPushButton {color:gray; }"
if cmd_token.restore:
btn_name = "YES"
if cmd_token.type in parent.sxconst_set.dict.keys():
custom_style = "QPushButton {color:green; }"
cmd_token_restore_tooltip = const_cmd_token_restore_tooltip
elif cmd_token.is_required:
if cmd_token.is_locked:
btn_name = "locked"
custom_style = "QPushButton {color:blue; }"
is_btn_enable = False
cmd_token_restore_tooltip = locked_cmd_token_restore_tooltip
else:
btn_name = "required"
custom_style = "QPushButton {color:red; }"
is_btn_enable = False
cmd_token_restore_tooltip = required_cmd_token_restore_tooltip
cmd_token_restore_widget = QPushButton("%s" % btn_name)
cmd_token_restore_widget.setStyleSheet(custom_style)
cmd_token_restore_widget.setEnabled(is_btn_enable)
grid_layout.addWidget(cmd_token_restore_widget, grid_row, grid_col_origin + token_label_col_span, token_widget_row_span, token_widget_col_span)
# construct new widget(s) for this command token
cmd_token_widget = QCheckBox("")
if cmd_token.restore == True:
cmd_token_widget.setCheckState(Qt.Checked)
else:
cmd_token_widget.setCheckState(Qt.Unchecked)
cmd_token_widget.setEnabled(not cmd_token.is_locked)
grid_layout.addWidget(cmd_token_widget, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span, token_widget_row_span, token_widget_col_span)
self.connect(cmd_token_restore_widget, SIGNAL("clicked()"), partial(self.handle_restore_widget_event, cmd_token))
else:
btn_name = "%s" % cmd_token.restore
custom_style = "QPushButton {color:gray; }"
is_btn_enable = True
if cmd_token.type in parent.sxconst_set.dict.keys():
custom_style = "QPushButton {color:green; }"
cmd_token_restore_tooltip = const_cmd_token_restore_tooltip
elif cmd_token.is_required:
if cmd_token.is_locked:
btn_name = "locked"
custom_style = "QPushButton {color:blue; }"
is_btn_enable = False
cmd_token_restore_tooltip = locked_cmd_token_restore_tooltip
else:
btn_name = "required"
custom_style = "QPushButton {color:red; }"
is_btn_enable = False
cmd_token_restore_tooltip = required_cmd_token_restore_tooltip
cmd_token_restore_widget = QPushButton("%s" % btn_name)
cmd_token_restore_widget.setStyleSheet(custom_style)
cmd_token_restore_widget.setEnabled(is_btn_enable)
grid_layout.addWidget(cmd_token_restore_widget, grid_row, grid_col_origin + token_label_col_span, token_widget_row_span, token_widget_col_span)
cmd_token_widget = QLineEdit()
cmd_token_widget.setText(cmd_token.restore)
cmd_token_widget.setEnabled(not cmd_token.is_locked)
grid_layout.addWidget(cmd_token_widget, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span, token_widget_row_span, token_widget_col_span)
self.connect(cmd_token_restore_widget, SIGNAL("clicked()"), partial(self.handle_restore_widget_event, cmd_token))
if cmd_token.type == "image":
file_format = "hdf"
temp_btn = QPushButton("Select .%s" % file_format)
temp_btn.setMinimumWidth(func_btn_min_width)
temp_btn.setToolTip(''+"Display open file dailog to select .%s format image file" % file_format)
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 2, token_widget_row_span, token_widget_col_span)
self.connect(temp_btn, SIGNAL("clicked()"), partial(self.sxcmdwidget.select_file, cmd_token_widget, file_format))
file_format = "bdb"
temp_btn = QPushButton("Select .%s" % file_format)
temp_btn.setToolTip(''+"Display open file dailog to select .%s format image file" % file_format)
temp_btn.setMinimumWidth(func_btn_min_width)
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 3, token_widget_row_span, token_widget_col_span)
self.connect(temp_btn, SIGNAL("clicked()"), partial(self.sxcmdwidget.select_file, cmd_token_widget, file_format))
elif cmd_token.type == "any_image":
temp_btn = QPushButton("Select Image")
temp_btn.setToolTip(''+"Display open file dailog to select standard format image file (e.g. .hdf, .mrc)")
temp_btn.setMinimumWidth(func_btn_min_width)
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 2, token_widget_row_span, token_widget_col_span)
self.connect(temp_btn, SIGNAL("clicked()"), partial(self.sxcmdwidget.select_file, cmd_token_widget))
file_format = "invisible"
temp_btn = QPushButton("Select .%s" % file_format)
temp_btn.setToolTip(''+"Display open file dailog to select .%s format image file" % file_format)
temp_btn.setEnabled(False)
temp_btn.setStyleSheet('background: rgba(0, 0, 0, 0); color: rgba(0, 0, 0, 0); border: 0px rgba(0, 0, 0, 0) solid')
temp_btn.setMinimumWidth(func_btn_min_width)
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 3, token_widget_row_span, token_widget_col_span)
elif cmd_token.type == "any_micrograph":
temp_btn = QPushButton("Select Image")
temp_btn.setToolTip(''+"Display open file dailog to select standard format image file (e.g. .hdf, .mrc)")
temp_btn.setMinimumWidth(func_btn_min_width)
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 2, token_widget_row_span, token_widget_col_span)
self.connect(temp_btn, SIGNAL("clicked()"), partial(self.sxcmdwidget.select_file, cmd_token_widget))
file_format = "txt"
temp_btn = QPushButton("Select .%s" % file_format)
temp_btn.setMinimumWidth(func_btn_min_width)
temp_btn.setToolTip(''+"Display open file dailog to select .%s parameter file" % file_format)
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 3, token_widget_row_span, token_widget_col_span)
self.connect(temp_btn, SIGNAL("clicked()"), partial(self.sxcmdwidget.select_file, cmd_token_widget, file_format))
elif cmd_token.type == "any_file_list":
temp_btn = QPushButton("Select Files")
temp_btn.setToolTip(''+"Display open file dailog to select files (e.g. *.*)")
temp_btn.setMinimumWidth(func_btn_min_width)
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 2, token_widget_row_span, token_widget_col_span)
self.connect(temp_btn, SIGNAL("clicked()"), partial(self.sxcmdwidget.select_file, cmd_token_widget, cmd_token.type))
file_format = "bdb"
temp_btn = QPushButton("Select .%s" % file_format)
temp_btn.setMinimumWidth(func_btn_min_width)
temp_btn.setToolTip(''+"Display open file dailog to select .%s format image file" % file_format)
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 3, token_widget_row_span, token_widget_col_span)
self.connect(temp_btn, SIGNAL("clicked()"), partial(self.sxcmdwidget.select_file, cmd_token_widget, file_format))
elif cmd_token.type == "any_image_list":
temp_btn = QPushButton("Select Images")
temp_btn.setMinimumWidth(func_btn_min_width)
temp_btn.setToolTip(''+"Display open file dailog to select standard format image files (e.g. .hdf, .mrc)")
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 2, token_widget_row_span, token_widget_col_span)
self.connect(temp_btn, SIGNAL("clicked()"), partial(self.sxcmdwidget.select_file, cmd_token_widget, cmd_token.type))
file_format = "invisible"
temp_btn = QPushButton("Select .%s" % file_format)
temp_btn.setToolTip(''+"Display open file dailog to select .%s format image file" % file_format)
temp_btn.setEnabled(False)
temp_btn.setStyleSheet('background: rgba(0, 0, 0, 0); color: rgba(0, 0, 0, 0); border: 0px rgba(0, 0, 0, 0) solid')
temp_btn.setMinimumWidth(func_btn_min_width)
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 3, token_widget_row_span, token_widget_col_span)
elif cmd_token.type == "bdb" or cmd_token.key_base == "makevstack":
file_format = "bdb"
temp_btn = QPushButton("Select .%s" % file_format)
temp_btn.setMinimumWidth(func_btn_min_width)
temp_btn.setToolTip(''+"Display open file dailog to select .%s format image file" % file_format)
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 2, token_widget_row_span, token_widget_col_span)
self.connect(temp_btn, SIGNAL("clicked()"), partial(self.sxcmdwidget.select_file, cmd_token_widget, file_format))
file_format = "invisible"
temp_btn = QPushButton("Select .%s" % file_format)
temp_btn.setToolTip(''+"Display open file dailog to select .%s format image file" % file_format)
temp_btn.setEnabled(False)
temp_btn.setStyleSheet('background: rgba(0, 0, 0, 0); color: rgba(0, 0, 0, 0); border: 0px rgba(0, 0, 0, 0) solid')
temp_btn.setMinimumWidth(func_btn_min_width)
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 3, token_widget_row_span, token_widget_col_span)
elif cmd_token.type == "pdb":
file_format = "pdb"
temp_btn = QPushButton("Select .%s" % file_format)
temp_btn.setMinimumWidth(func_btn_min_width)
temp_btn.setToolTip(''+"Display open file dailog to select .%s format image file" % file_format)
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 2, token_widget_row_span, token_widget_col_span)
self.connect(temp_btn, SIGNAL("clicked()"), partial(self.sxcmdwidget.select_file, cmd_token_widget, file_format))
file_format = "invisible"
temp_btn = QPushButton("Select .%s" % file_format)
temp_btn.setToolTip(''+"Display open file dailog to select .%s format image file" % file_format)
temp_btn.setEnabled(False)
temp_btn.setStyleSheet('background: rgba(0, 0, 0, 0); color: rgba(0, 0, 0, 0); border: 0px rgba(0, 0, 0, 0) solid')
temp_btn.setMinimumWidth(func_btn_min_width)
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 3, token_widget_row_span, token_widget_col_span)
elif cmd_token.type == "hdf":
file_format = "hdf"
temp_btn = QPushButton("Select .%s" % file_format)
temp_btn.setMinimumWidth(func_btn_min_width)
temp_btn.setToolTip(''+"Display open file dailog to select .%s format image file" % file_format)
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 2, token_widget_row_span, token_widget_col_span)
self.connect(temp_btn, SIGNAL("clicked()"), partial(self.sxcmdwidget.select_file, cmd_token_widget, file_format))
file_format = "invisible"
temp_btn = QPushButton("Select .%s" % file_format)
temp_btn.setToolTip(''+"Display open file dailog to select .%s format image file" % file_format)
temp_btn.setEnabled(False)
temp_btn.setStyleSheet('background: rgba(0, 0, 0, 0); color: rgba(0, 0, 0, 0); border: 0px rgba(0, 0, 0, 0) solid')
temp_btn.setMinimumWidth(func_btn_min_width)
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 3, token_widget_row_span, token_widget_col_span)
elif cmd_token.type == "mrc":
file_format = "mrc"
temp_btn = QPushButton("Select .%s" % file_format)
temp_btn.setMinimumWidth(func_btn_min_width)
temp_btn.setToolTip(''+"Display open file dailog to select .%s format image file" % file_format)
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 2, token_widget_row_span, token_widget_col_span)
self.connect(temp_btn, SIGNAL("clicked()"), partial(self.sxcmdwidget.select_file, cmd_token_widget, file_format))
file_format = "invisible"
temp_btn = QPushButton("Select .%s" % file_format)
temp_btn.setToolTip(''+"Display open file dailog to select .%s format image file" % file_format)
temp_btn.setEnabled(False)
temp_btn.setStyleSheet('background: rgba(0, 0, 0, 0); color: rgba(0, 0, 0, 0); border: 0px rgba(0, 0, 0, 0) solid')
temp_btn.setMinimumWidth(func_btn_min_width)
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 3, token_widget_row_span, token_widget_col_span)
elif cmd_token.type == "parameters":
temp_btn = QPushButton("Select Parameter")
temp_btn.setMinimumWidth(func_btn_min_width)
temp_btn.setToolTip(''+"Display open file dailog to select parameter file (e.g. .txt)")
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 2, token_widget_row_span, token_widget_col_span)
self.connect(temp_btn, SIGNAL("clicked()"), partial(self.sxcmdwidget.select_file, cmd_token_widget))
file_format = "invisible"
temp_btn = QPushButton("Select .%s" % file_format)
temp_btn.setToolTip(''+"Display open file dailog to select .%s format image file" % file_format)
temp_btn.setEnabled(False)
temp_btn.setStyleSheet('background: rgba(0, 0, 0, 0); color: rgba(0, 0, 0, 0); border: 0px rgba(0, 0, 0, 0) solid')
temp_btn.setMinimumWidth(func_btn_min_width)
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 3, token_widget_row_span, token_widget_col_span)
elif cmd_token.type == "txt":
file_format = "txt"
temp_btn = QPushButton("Select .%s" % file_format)
temp_btn.setMinimumWidth(func_btn_min_width)
temp_btn.setToolTip(''+"Display open file dailog to select .%s parameter file" % file_format)
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 2, token_widget_row_span, token_widget_col_span)
self.connect(temp_btn, SIGNAL("clicked()"), partial(self.sxcmdwidget.select_file, cmd_token_widget, file_format))
file_format = "invisible"
temp_btn = QPushButton("Select .%s" % file_format)
temp_btn.setToolTip(''+"Display open file dailog to select .%s format image file" % file_format)
temp_btn.setEnabled(False)
temp_btn.setStyleSheet('background: rgba(0, 0, 0, 0); color: rgba(0, 0, 0, 0); border: 0px rgba(0, 0, 0, 0) solid')
temp_btn.setMinimumWidth(func_btn_min_width)
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 3, token_widget_row_span, token_widget_col_span)
elif cmd_token.type == "exe":
file_format = "exe"
temp_btn = QPushButton("Select .%s" % file_format)
temp_btn.setMinimumWidth(func_btn_min_width)
temp_btn.setToolTip(''+"Display open file dailog to select .%s parameter file" % file_format)
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 2, token_widget_row_span, token_widget_col_span)
self.connect(temp_btn, SIGNAL("clicked()"), partial(self.sxcmdwidget.select_file, cmd_token_widget, file_format))
file_format = "invisible"
temp_btn = QPushButton("Select .%s" % file_format)
temp_btn.setToolTip(''+"Display open file dailog to select .%s format image file" % file_format)
temp_btn.setEnabled(False)
temp_btn.setStyleSheet('background: rgba(0, 0, 0, 0); color: rgba(0, 0, 0, 0); border: 0px rgba(0, 0, 0, 0) solid')
temp_btn.setMinimumWidth(func_btn_min_width)
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 3, token_widget_row_span, token_widget_col_span)
elif cmd_token.type == "any_file":
temp_btn = QPushButton("Select File")
temp_btn.setMinimumWidth(func_btn_min_width)
temp_btn.setToolTip(''+"Display open file dailog to select file (e.g. *.*)")
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 2, token_widget_row_span, token_widget_col_span)
self.connect(temp_btn, SIGNAL("clicked()"), partial(self.sxcmdwidget.select_file, cmd_token_widget))
file_format = "invisible"
temp_btn = QPushButton("Select .%s" % file_format)
temp_btn.setToolTip(''+"Display open file dailog to select .%s format image file" % file_format)
temp_btn.setEnabled(False)
temp_btn.setStyleSheet('background: rgba(0, 0, 0, 0); color: rgba(0, 0, 0, 0); border: 0px rgba(0, 0, 0, 0) solid')
temp_btn.setMinimumWidth(func_btn_min_width)
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 3, token_widget_row_span, token_widget_col_span)
elif cmd_token.type == "directory" or cmd_token.type == "any_directory" or cmd_token.type == "output_continue":
temp_btn = QPushButton("Select directory")
temp_btn.setMinimumWidth(func_btn_min_width)
temp_btn.setToolTip(''+"Display select directory dailog"+'')
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 2, token_widget_row_span, token_widget_col_span)
self.connect(temp_btn, SIGNAL("clicked()"), partial(self.sxcmdwidget.select_dir, cmd_token_widget))
file_format = "invisible"
temp_btn = QPushButton("Select .%s" % file_format)
temp_btn.setToolTip(''+"Display open file dailog to select .%s format image file" % file_format)
temp_btn.setEnabled(False)
temp_btn.setStyleSheet('background: rgba(0, 0, 0, 0); color: rgba(0, 0, 0, 0); border: 0px rgba(0, 0, 0, 0) solid')
temp_btn.setMinimumWidth(func_btn_min_width)
grid_layout.addWidget(temp_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 3, token_widget_row_span, token_widget_col_span)
# elif cmd_token.type == "output":
# else:
# if cmd_token.type not in ["int", "float", "string", "apix", "ctfwin", "box", "radius", "sym", "mass"]: ERROR("Logical Error: Encountered unsupported type (%s). Consult with the developer." % cmd_token.type, "%s in %s" % (__name__, os.path.basename(__file__)))
cmd_token_widget.setToolTip(''+cmd_token.help+'')
cmd_token_restore_widget.setToolTip(''+cmd_token_restore_tooltip+'')
grid_row += 1
# Register this widget
cmd_token.widget = cmd_token_widget
cmd_token.restore_widget = cmd_token_restore_widget
if tab_group == "main":
# Add space
grid_row += 1
# Add gui components for MPI related paramaters
temp_label = QLabel("MPI processors")
temp_label.setMinimumWidth(token_label_min_width)
submit_layout.addWidget(temp_label, grid_row, grid_col_origin, token_label_row_span, token_label_col_span)
# self.mpi_nproc_edit = QLineEdit(self)
self.mpi_nproc_edit = QLineEdit()
self.mpi_nproc_edit.setText("1")
self.mpi_nproc_edit.setToolTip(''+"Number of processors to use. default is single processor mode"+'')
submit_layout.addWidget(self.mpi_nproc_edit, grid_row, grid_col_origin + token_label_col_span, token_widget_row_span, token_widget_col_span)
# Add save paramaters button
self.save_params_btn = QPushButton("Save parameters")
self.save_params_btn.setMinimumWidth(btn_min_width)
self.save_params_btn.setToolTip(''+"Save gui parameter settings"+'')
self.connect(self.save_params_btn, SIGNAL("clicked()"), self.sxcmdwidget.save_params)
submit_layout.addWidget(self.save_params_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 2, token_widget_row_span, token_widget_col_span*2)
grid_row += 1
temp_label = QLabel("MPI command line template")
temp_label.setMinimumWidth(token_label_min_width)
submit_layout.addWidget(temp_label, grid_row, grid_col_origin, token_label_row_span, token_label_col_span)
self.mpi_cmd_line_edit = QLineEdit()
self.mpi_cmd_line_edit.setText("")
self.mpi_cmd_line_edit.setToolTip(''+"Template of MPI command line (e.g. \"mpirun -np XXX_SXMPI_NPROC_XXX --host n0,n1,n2 XXX_SXCMD_LINE_XXX\"). if empty, use \"mpirun -np XXX_SXMPI_NPROC_XXX XXX_SXCMD_LINE_XXX\"")
submit_layout.addWidget(self.mpi_cmd_line_edit, grid_row, grid_col_origin + token_label_col_span, token_widget_row_span, token_widget_col_span)
# Add load paramaters button
self.load_params_btn = QPushButton("Load parameters")
self.load_params_btn.setMinimumWidth(btn_min_width)
self.load_params_btn.setToolTip(''+"Load gui parameter settings to retrieve a previously-saved one"+'')
self.connect(self.load_params_btn, SIGNAL("clicked()"), self.sxcmdwidget.load_params)
submit_layout.addWidget(self.load_params_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 2, token_widget_row_span, token_widget_col_span*2)
grid_row += 1
# If MPI is not supported, disable this widget
self.set_text_entry_widget_enable_state(self.mpi_nproc_edit, self.sxcmdwidget.sxcmd.mpi_support)
self.set_text_entry_widget_enable_state(self.mpi_cmd_line_edit, self.sxcmdwidget.sxcmd.mpi_support)
# Add gui components for queue submission (qsub)
is_qsub_enabled = False
temp_label = QLabel("Submit job to queue")
temp_label.setMinimumWidth(token_label_min_width)
submit_layout.addWidget(temp_label, grid_row, grid_col_origin, token_label_row_span, token_label_col_span)
self.qsub_enable_checkbox = QCheckBox("")
if is_qsub_enabled == True:
self.qsub_enable_checkbox.setCheckState(Qt.Checked)
else: # assert(is_qsub_enabled == False)
self.qsub_enable_checkbox.setCheckState(Qt.Unchecked)
self.qsub_enable_checkbox.setToolTip(''+"Submit job to queue"+'')
self.qsub_enable_checkbox.stateChanged.connect(self.set_qsub_enable_state) # To control enable state of the following qsub related widgets
self.qsub_enable_checkbox.setEnabled(self.sxcmdwidget.sxcmd.is_submittable)
submit_layout.addWidget(self.qsub_enable_checkbox, grid_row, grid_col_origin + token_label_col_span, token_widget_row_span, token_widget_col_span)
grid_row += 1
temp_label = QLabel("Job name")
temp_label.setMinimumWidth(token_label_min_width)
submit_layout.addWidget(temp_label, grid_row, grid_col_origin, token_label_row_span, token_label_col_span)
self.qsub_job_name_edit = QLineEdit()
if self.sxcmdwidget.sxcmd.is_submittable == True:
self.qsub_job_name_edit.setText(self.sxcmdwidget.sxcmd.get_mode_name_for("file_path"))
else: # assert(self.sxcmdwidget.sxcmd.is_submittable == False)
assert(self.sxcmdwidget.sxcmd.mpi_support == False)
self.qsub_job_name_edit.setText("N/A")
self.qsub_job_name_edit.setToolTip(''+"Name of this job"+'')
submit_layout.addWidget(self.qsub_job_name_edit, grid_row, grid_col_origin + token_label_col_span, token_widget_row_span, token_widget_col_span)
grid_row += 1
temp_label = QLabel("Submission command")
temp_label.setMinimumWidth(token_label_min_width)
submit_layout.addWidget(temp_label, grid_row, grid_col_origin, token_label_row_span, token_label_col_span)
self.qsub_cmd_edit = QLineEdit()
if self.sxcmdwidget.sxcmd.is_submittable == True:
self.qsub_cmd_edit.setText("qsub")
else: # assert(self.sxcmdwidget.sxcmd.is_submittable == False)
assert(self.sxcmdwidget.sxcmd.mpi_support == False)
self.qsub_cmd_edit.setText("N/A")
self.qsub_cmd_edit.setToolTip(''+"Name of submission command to queue job"+'')
submit_layout.addWidget(self.qsub_cmd_edit, grid_row, grid_col_origin + token_label_col_span, token_widget_row_span, token_widget_col_span)
self.cmd_line_btn = QPushButton("Generate command line")
self.cmd_line_btn.setMinimumWidth(btn_min_width)
self.cmd_line_btn.setToolTip(''+"Generate command line from gui parameter settings and automatically save settings"+'')
self.connect(self.cmd_line_btn, SIGNAL("clicked()"), self.sxcmdwidget.print_cmd_line)
submit_layout.addWidget(self.cmd_line_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 2, token_widget_row_span, token_widget_col_span*2)
grid_row += 1
temp_label = QLabel("Submission script template")
temp_label.setMinimumWidth(token_label_min_width)
submit_layout.addWidget(temp_label, grid_row, grid_col_origin, token_label_row_span, token_label_col_span)
self.qsub_script_edit = QLineEdit()
if self.sxcmdwidget.sxcmd.is_submittable == True:
self.qsub_script_edit.setText("msgui_qsub.sh")
else: # assert(self.sxcmdwidget.sxcmd.is_submittable == False)
assert(self.sxcmdwidget.sxcmd.mpi_support == False)
self.qsub_script_edit.setText("N/A")
self.qsub_script_edit.setToolTip(''+"File name of submission script template (e.g. $PROJECT_DIR/msgui_qsub.sh)"+'')
submit_layout.addWidget(self.qsub_script_edit, grid_row, grid_col_origin + token_label_col_span, token_widget_row_span, token_widget_col_span)
self.qsub_script_open_btn = QPushButton("Select Template")
self.qsub_script_open_btn.setMinimumWidth(func_btn_min_width)
self.qsub_script_open_btn.setToolTip(''+"Display open file dailog to select job submission script template file"+'')
self.connect(self.qsub_script_open_btn, SIGNAL("clicked()"), partial(self.sxcmdwidget.select_file, self.qsub_script_edit))
submit_layout.addWidget(self.qsub_script_open_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span, token_widget_row_span, token_widget_col_span)
# Add a run button
# self.execute_btn = QPushButton("Run %s" % self.sxcmdwidget.sxcmd.get_mode_name_for("human"))
self.execute_btn = QPushButton("Run command")
# make 3D textured push button look
custom_style = "QPushButton {font: bold; color: #000;border: 1px solid #333;border-radius: 11px;padding: 2px;background: qradialgradient(cx: 0, cy: 0,fx: 0.5, fy:0.5,radius: 1, stop: 0 #fff, stop: 1 #8D0)} QPushButton:pressed {font: bold; color: #000;border: 1px solid #333;border-radius: 11px;padding: 2px;background: qradialgradient(cx: 0, cy: 0,fx: 0.5, fy:0.5,radius: 1, stop: 0 #fff, stop: 1 #084)} QPushButton:focus {font: bold; color: #000;border: 2px solid #8D0;border-radius: 11px;padding: 2px;background: qradialgradient(cx: 0, cy: 0,fx: 0.5, fy:0.5,radius: 1, stop: 0 #fff, stop: 1 #8D0)} QPushButton:disabled {font: bold; color: #000;border: 1px solid #333;border-radius: 11px;padding: 2px;background: qradialgradient(cx: 0, cy: 0,fx: 0.5, fy:0.5,radius: 1, stop: 0 #fff, stop: 1 #ff0000)}"
self.execute_btn.setStyleSheet(custom_style)
self.execute_btn.setMinimumWidth(btn_min_width)
self.execute_btn.setToolTip(''+"Run %s and automatically save gui parameter settings" % self.sxcmdwidget.sxcmd.get_mode_name_for("human"))
self.connect(self.execute_btn, SIGNAL("clicked()"), self.sxcmdwidget.execute_cmd_line)
submit_layout.addWidget(self.execute_btn, grid_row, grid_col_origin + token_label_col_span + token_widget_col_span * 2, token_widget_row_span, token_widget_col_span*2)
grid_row += 1
# Initialize enable state of qsub related widgets
self.set_qsub_enable_state()
def set_text_entry_widget_enable_state(self, widget, is_enabled):
    """Enable or disable a text-entry widget and tint its background to match.

    The background is set to the default palette's Inactive base color when
    enabled, and to the Disabled base color when not, so the widget visually
    reflects its state even for widget styles that do not gray out on their own.
    """
    reference_palette = QPalette()
    color_group = QPalette.Disabled if is_enabled == False else QPalette.Inactive
    base_color = reference_palette.color(color_group, QPalette.Base)
    widget.setEnabled(is_enabled)
    widget_palette = widget.palette()
    widget_palette.setColor(widget.backgroundRole(), base_color)
    widget.setPalette(widget_palette)
def set_qsub_enable_state(self):
    """Synchronize the enable state of the qsub-related widgets with the checkbox.

    When "Submit job to queue" is checked, the queue-submission entry widgets
    are enabled and (if the command supports MPI) the MPI command-line template
    is disabled, and vice versa.
    """
    use_qsub = (self.qsub_enable_checkbox.checkState() == Qt.Checked)
    # The MPI command-line template is only meaningful when NOT submitting to a queue
    if self.sxcmdwidget.sxcmd.mpi_support:
        self.set_text_entry_widget_enable_state(self.mpi_cmd_line_edit, not use_qsub)
    # All queue-submission entry widgets follow the checkbox state
    for qsub_entry in (self.qsub_job_name_edit, self.qsub_cmd_edit, self.qsub_script_edit):
        self.set_text_entry_widget_enable_state(qsub_entry, use_qsub)
    self.qsub_script_open_btn.setEnabled(use_qsub)
def handle_restore_widget_event(self, sxcmd_token, widget_index=0):
    """Reset the GUI widget(s) of a command token back to its stored restore value.

    Args:
        sxcmd_token: Command token whose widget should be reset. Must not be locked.
        widget_index: For "function" type tokens, which own a pair of line edits,
            selects which of the two widgets (0 or 1) to restore. Ignored for
            all other token types.
    """
    assert(not sxcmd_token.is_locked)
    if sxcmd_token.type == "function":
        # Function tokens keep two widgets and two restore values in parallel lists
        assert(len(sxcmd_token.widget) == 2 and len(sxcmd_token.restore) == 2 and widget_index < 2)
        sxcmd_token.widget[widget_index].setText("%s" % sxcmd_token.restore[widget_index])
    else:
        if sxcmd_token.type == "bool":
            # BUG FIX: the original passed Qt.Checked/Qt.Unchecked (CheckState enum
            # values) to QCheckBox.setChecked(), which expects a bool and only worked
            # by truthiness accident. Use setCheckState(), consistent with how the
            # qsub checkbox is initialized elsewhere in this file.
            if sxcmd_token.restore:
                sxcmd_token.widget.setCheckState(Qt.Checked)
            else: # sxcmd_token.restore == False
                sxcmd_token.widget.setCheckState(Qt.Unchecked)
        else:
            sxcmd_token.widget.setText("%s" % sxcmd_token.restore)
# ========================================================================================
# Command Category Widget (opened by class SXMainWindow)
class SXCmdCategoryWidget(QWidget):
    """Widget for a single command category (opened by SXMainWindow).

    Shows a column of buttons, one per sx command in the category, next to a
    stacked layout holding the SXCmdWidget settings panel of each command.
    Clicking a button brings the matching panel to the front; shift-clicking
    opens the command's wiki page in a web browser instead.
    """
    def __init__(self, sxconst_set, sxcmd_category, parent = None):
        super(SXCmdCategoryWidget, self).__init__(parent)
        # ><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><
        # class variables
        self.sxconst_set = sxconst_set        # project constant parameter set shared by all commands
        self.sxcmd_category = sxcmd_category  # the command category displayed by this widget
        self.cur_sxcmd = None                 # currently selected command (None until first selection)
        # Layout constants
        self.sxcmd_btn_row_span = 1
        self.sxcmd_btn_col_span = 1
        self.sxcmd_btn_area_row_span = self.sxcmd_btn_row_span * SXLookFeelConst.expected_cmd_counts
        self.sxcmd_btn_area_col_span = self.sxcmd_btn_col_span
        self.sxcmd_widget_area_row_span = self.sxcmd_btn_area_row_span
        self.sxcmd_widget_area_col_span = 1
        self.grid_row_origin = 0
        self.grid_col_origin = 0
        # Layout variables
        self.grid_layout = None # grid layout
        self.grid_row = self.grid_row_origin # Keep current row
        self.grid_col = self.grid_col_origin # keep current column
        self.sxcmd_btn_group = None
        # ><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><
        # --------------------------------------------------------------------------------
        # Setup Window Layout
        # --------------------------------------------------------------------------------
        self.setup_layout(QBrush(SXLookFeelConst.default_bg_color))
        # --------------------------------------------------------------------------------
        # Add SX Commands (sx*.py) associated widgets
        # --------------------------------------------------------------------------------
        self.add_sxcmd_widgets()
        # # --------------------------------------------------------------------------------
        # # Load the previously saved parameter setting of this sx command
        # # Override the registration of project constant parameter settings with the previously-saved one
        # # --------------------------------------------------------------------------------
        # for sxcmd in self.sxcmd_category.cmd_list:
        # if os.path.exists(sxcmd.widget.gui_settings_file_path):
        # sxcmd.widget.read_params(sxcmd.widget.gui_settings_file_path)
        # --------------------------------------------------------------------------------
        # Always select the 1st entry of the command list upon startup
        # --------------------------------------------------------------------------------
        self.handle_sxcmd_btn_event(self.sxcmd_category.cmd_list[0])
    def setup_layout(self, background_brush):
        """Set the background brush of this widget and create the button grid layout."""
        # Setup background color of this widget
        self.setAutoFillBackground(True)
        palette = QPalette()
        palette.setBrush(QPalette.Background, background_brush)
        self.setPalette(palette)
        # Setup grid layout in the scroll area
        self.grid_layout = QGridLayout()
        self.grid_layout.setMargin(SXLookFeelConst.grid_margin)
        self.grid_layout.setSpacing(SXLookFeelConst.grid_spacing)
        self.grid_layout.setColumnMinimumWidth(0, SXLookFeelConst.sxcmd_btn_area_min_width)
        # self.grid_layout.setColumnMinimumWidth(1, SXLookFeelConst.sxcmd_widget_area_min_width)
        # Give the column of the command settings area a higher stretch priority so that the other area does not stretch horizontally
        # self.grid_layout.setColumnStretch(self.grid_col_origin + self.sxcmd_btn_area_col_span, self.grid_layout.columnStretch(self.grid_col_origin + self.sxcmd_btn_area_col_span) + 1)
    # Add Pipeline SX Commands (sx*.py) associated widgets
    def add_sxcmd_widgets(self):
        """Create one button and one SXCmdWidget panel per command in the category.

        Buttons are grouped under role headers (COMMANDS / ALTERNATIVES /
        UTILITIES); each button click switches the stacked layout to the
        corresponding command panel via handle_sxcmd_btn_event.
        """
        self.sxcmd_btn_group = QButtonGroup()
        # self.sxcmd_btn_group.setExclusive(True) # NOTE: 2016/02/18 Toshio Moriya: Without QPushButton.setCheckable(True). This does not do anything. Let manually do this
        current_role = None
        self.stacked_layout = QStackedLayout()
        grid_box_layout = QVBoxLayout()
        grid_box_layout.addLayout(self.grid_layout)
        grid_box_layout.addStretch(1)
        global_layout = QHBoxLayout()
        global_layout.addLayout(grid_box_layout)
        global_layout.addLayout(self.stacked_layout, stretch=1)
        self.setLayout(global_layout)
        # Add SX Commands (sx*.py) associated widgets
        for sxcmd in self.sxcmd_category.cmd_list:
            if sxcmd.role != current_role:
                # Add title label and set position and font style
                label_text = ""
                if sxcmd.role == "sxr_pipe":
                    label_text = "COMMANDS"
                elif sxcmd.role == "sxr_alt":
                    label_text = "ALTERNATIVES"
                elif sxcmd.role == "sxr_util":
                    label_text = "UTILITIES"
                else:
                    label_text = "UNKNOWN"
                # Leave an empty grid row between role sections (skip before the first one)
                if current_role != None:
                    self.grid_row += 1
                # title=QLabel("<span style=\'font-size:18pt; font-weight:600; color:#aa0000;\'><b>%s </b></span><span style=\'font-size:12pt; font-weight:60; color:#aa0000;\'>(shift-click for wiki)</span>" % label_text)
                title=QLabel("%s (shift-click for wiki)" % label_text)
                self.grid_layout.addWidget(title, self.grid_row, self.grid_col_origin, self.sxcmd_btn_row_span, self.sxcmd_btn_col_span)
                self.grid_row += 1
                current_role = sxcmd.role
            # Add button for this sx*.py process
            sxcmd.btn = QPushButton(sxcmd.label)
            # sxcmd.btn.setCheckable(True) # NOTE: 2016/02/18 Toshio Moriya: With this setting, we can not move the focus to the unchecked buttons... PyQt bug?
            sxcmd.btn.setToolTip(''+sxcmd.short_info+'')
            self.sxcmd_btn_group.addButton(sxcmd.btn)
            self.grid_layout.addWidget(sxcmd.btn, self.grid_row, self.grid_col_origin, self.sxcmd_btn_row_span, self.sxcmd_btn_col_span)
            # Create SXCmdWidget for this sx*.py process
            sxcmd.widget = SXCmdWidget(self.sxconst_set, sxcmd)
            self.stacked_layout.addWidget(sxcmd.widget)
            # connect widget signals
            self.connect(sxcmd.btn, SIGNAL("clicked()"), partial(self.handle_sxcmd_btn_event, sxcmd))
            self.grid_row += 1
    def load_previous_session(self):
        """Restore previously saved GUI parameter settings for every command that has a settings file."""
        for sxcmd in self.sxcmd_category.cmd_list:
            if os.path.exists(sxcmd.widget.gui_settings_file_path):
                sxcmd.widget.read_params(sxcmd.widget.gui_settings_file_path)
    def handle_sxcmd_btn_event(self, sxcmd):
        """Handle a click on a command button.

        Shift-click opens the command's wiki page in a web browser and returns.
        A plain click switches the stacked layout to the command's panel and
        updates the button styles so the selected command is highlighted.
        """
        modifiers = QApplication.keyboardModifiers()
        if modifiers == Qt.ShiftModifier:
            # os.system("python -m webbrowser %s%s" % (SPARX_DOCUMENTATION_WEBSITE, sxcmd.name))
            # sxcmd_wiki_url = SXLookFeelConst.generate_sxcmd_wiki_url(sxcmd, wiki_type = "SPARX")
            sxcmd_wiki_url = SXLookFeelConst.generate_sxcmd_wiki_url(sxcmd)
            print "Opening Wiki Page ..."
            print sxcmd_wiki_url
            os.system("python -m webbrowser %s" % (sxcmd_wiki_url))
            return
        # Nothing to do if the clicked command is already selected
        if self.cur_sxcmd == sxcmd: return
        # Un-highlight the previously selected button
        if self.cur_sxcmd != None:
            custom_style = "QPushButton {font: normal; color:black; }" # custom_style = "QPushButton {color:#000; }"
            self.cur_sxcmd.btn.setStyleSheet(custom_style)
        self.cur_sxcmd = sxcmd
        # Bring the newly selected command panel to the front and highlight its button
        if self.cur_sxcmd != None:
            self.stacked_layout.setCurrentWidget(self.cur_sxcmd.widget)
            custom_style = "QPushButton {font: bold; color:blue; }" # custom_style = "QPushButton {font: bold; color:#8D0; }"
            self.cur_sxcmd.btn.setStyleSheet(custom_style)
    def quit_all_child_applications(self):
        """Ask every command widget in this category to quit its child applications."""
        # Quit all child applications
        for sxcmd in self.sxcmd_category.cmd_list:
            sxcmd.widget.quit_all_child_applications()
# ========================================================================================
# Layout of the project constants parameters widget; owned by the main window
class SXConstSetWidget(QWidget):
    """Layout of the project constant parameters widget; owned by the main window.

    Presents one row per project constant (label, a button showing the currently
    registered value, and an edit box), plus buttons to register, save, and load
    the settings. Registering propagates each constant's value to every unlocked
    command token of the matching type across all command categories.
    """
    def __init__(self, sxconst_set, sxcmd_category_list, parent=None):
        super(SXConstSetWidget, self).__init__(parent)
        # ><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><
        # class variables
        self.sxconst_set = sxconst_set                  # set of project constant parameters
        self.sxcmd_category_list = sxcmd_category_list  # all command categories to propagate registered values into
        self.gui_settings_file_path = "%s/gui_settings_project.txt" % (SXLookFeelConst.project_dir)
        # Layout constants and variables
        global_row_origin = 0; global_col_origin = 0
        global_row_span = 4; global_col_span = 1
        header_row_origin = 0; header_col_origin = 0
        title_row_span = 1; title_col_span = 1
        short_info_row_span = 1; short_info_col_span = 1
        title_min_width = 300
        short_info_min_width = 300
        short_info_min_height = 80
        const_set_row_origin = 0; const_set_col_origin = 0
        const_label_row_span = 1; const_label_col_span = 1
        const_register_widget_row_span = 1; const_register_widget_col_span = 1
        const_widget_row_span = 1; const_widget_col_span = 1
        const_label_min_width = 150
        const_register_widget_min_width = const_label_min_width
        const_widget_min_width = const_label_min_width
        btn_row_origin = 0; btn_col_origin = 0
        func_btn_row_span = 1; func_btn_col_span = 1
        register_btn_row_span = 1; register_btn_col_span = 2
        func_btn_min_width = 50
        # ><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><
        # Set the background color of this widget
        self.setAutoFillBackground(True)
        palette = QPalette()
        palette.setBrush(QPalette.Background, QBrush(SXLookFeelConst.default_bg_color))
        self.setPalette(palette)
        global_layout = QGridLayout()
        global_layout.setMargin(SXLookFeelConst.grid_margin)
        global_layout.setSpacing(SXLookFeelConst.grid_spacing)
        global_layout.setRowStretch(global_row_span - 1, global_layout.rowStretch(global_row_origin) + 1)
        header_layout = QGridLayout()
        header_layout.setMargin(SXLookFeelConst.grid_margin)
        header_layout.setSpacing(SXLookFeelConst.grid_spacing)
        const_set_layout = QGridLayout()
        const_set_layout.setMargin(SXLookFeelConst.grid_margin)
        const_set_layout.setSpacing(SXLookFeelConst.grid_spacing)
        btn_layout = QGridLayout()
        btn_layout.setMargin(SXLookFeelConst.grid_margin)
        btn_layout.setSpacing(SXLookFeelConst.grid_spacing * 2)
        global_grid_row = global_row_origin
        # Start add title widgets to the grid layout
        header_grid_row = header_row_origin
        # Set a label and its position in this tab
        temp_label = QLabel("%s" % (self.sxconst_set.label))
        temp_label.setMinimumWidth(title_min_width)
        header_layout.addWidget(temp_label, header_grid_row, header_col_origin, title_row_span, title_col_span)
        header_grid_row += 1
        # NOTE: 2015/11/17 Toshio Moriya
        # Necessary to separate "%s" from the information for avoiding to invoke the tag interpretations of string
        # e.g. < becomes the escape character
        temp_label = QLabel("%s" % (self.sxconst_set.short_info))
        temp_label.setWordWrap(True)
        temp_label.setMinimumWidth(short_info_min_width)
        temp_label.setMinimumHeight(short_info_min_height)
        header_layout.addWidget(temp_label, header_grid_row, header_col_origin, short_info_row_span, short_info_col_span)
        # Add const set grid layout to global layout
        global_layout.addLayout(header_layout, global_grid_row, global_col_origin)
        global_grid_row += 1
        # Start add project parameter constant widgets to the grid layout
        const_set_grid_row = const_set_row_origin
        # Add widget for editing command args and options
        for sxconst in self.sxconst_set.list:
            # Create widget associated to this project constant parameter
            temp_label = QLabel(sxconst.label)
            temp_label.setMinimumWidth(const_label_min_width)
            const_set_layout.addWidget(temp_label, const_set_grid_row, const_set_col_origin, const_label_row_span, const_label_col_span)
            # Button showing the registered value; clicking copies it back into the edit box
            sxconst_register_widget = QPushButton("%s" % sxconst.register)
            sxconst_register_widget.setMinimumWidth(const_register_widget_min_width)
            custom_style = "QPushButton {color:green; }"
            sxconst_register_widget.setStyleSheet(custom_style)
            const_set_layout.addWidget(sxconst_register_widget, const_set_grid_row, const_set_row_origin + const_label_col_span, const_register_widget_row_span, const_register_widget_col_span)
            sxconst_register_widget.setToolTip(''+"Retrieve this registered value to edit box"+'')
            self.connect(sxconst_register_widget, SIGNAL("clicked()"), partial(self.handle_regster_widget_event, sxconst))
            # Edit box holding the pending (not yet registered) value
            sxconst_widget = QLineEdit()
            sxconst_widget.setMinimumWidth(const_widget_min_width)
            sxconst_widget.setText(sxconst.register)
            sxconst_widget.setToolTip(''+sxconst.help+'')
            const_set_layout.addWidget(sxconst_widget, const_set_grid_row, const_set_row_origin + const_label_col_span + const_register_widget_col_span, const_widget_row_span, const_widget_col_span)
            const_set_grid_row += 1
            # Register this widget
            sxconst.register_widget = sxconst_register_widget
            sxconst.widget = sxconst_widget
        # Add const set grid layout to global layout
        global_layout.addLayout(const_set_layout, global_grid_row, global_col_origin)
        # global_grid_row += 1
        # Start add buttons to the grid layout
        btn_grid_row = btn_row_origin
        # Add a register button
        self.execute_btn = QPushButton("Register settings")
        # make 3D textured push button look
        custom_style = "QPushButton {font: bold; color: #000;border: 1px solid #333;border-radius: 11px;padding: 2px;background: qradialgradient(cx: 0, cy: 0,fx: 0.5, fy:0.5,radius: 1, stop: 0 #fff, stop: 1 #8D0);min-width:90px;margin:5px} QPushButton:pressed {font: bold; color: #000;border: 1px solid #333;border-radius: 11px;padding: 2px;background: qradialgradient(cx: 0, cy: 0,fx: 0.5, fy:0.5,radius: 1, stop: 0 #fff, stop: 1 #084);min-width:90px;margin:5px}"
        self.execute_btn.setStyleSheet(custom_style)
        self.execute_btn.setMinimumWidth(func_btn_min_width * register_btn_col_span)
        self.execute_btn.setToolTip(''+"Register project constant parameter settings to automatically set values to command arguments and options"+'')
        self.connect(self.execute_btn, SIGNAL("clicked()"), self.register_const_set)
        btn_layout.addWidget(self.execute_btn, btn_grid_row, btn_col_origin, register_btn_row_span, register_btn_col_span)
        btn_grid_row += 1
        # Add save project constant parameter settings button
        self.save_consts_btn = QPushButton("Save settings")
        self.save_consts_btn.setMinimumWidth(func_btn_min_width)
        self.save_consts_btn.setToolTip(''+"Save project constant parameter settings"+'')
        self.connect(self.save_consts_btn, SIGNAL("clicked()"), self.save_consts)
        btn_layout.addWidget(self.save_consts_btn, btn_grid_row, btn_col_origin, func_btn_row_span, func_btn_col_span)
        # Add load project constant parameter settings button
        self.load_consts_btn = QPushButton("Load settings")
        self.load_consts_btn.setMinimumWidth(func_btn_min_width)
        self.load_consts_btn.setToolTip(''+"Load project constant parameter settings to retrieve the previously-saved one"+'')
        self.connect(self.load_consts_btn, SIGNAL("clicked()"), self.load_consts)
        btn_layout.addWidget(self.load_consts_btn, btn_grid_row, btn_col_origin + func_btn_col_span, func_btn_row_span, func_btn_col_span)
        btn_grid_row += 1
        # Add button grid layout to global layout
        # global_layout.addLayout(btn_layout, global_grid_row, global_col_origin) # Maybe later :)
        # Load the previously saved parameter setting of this sx command
        if os.path.exists(self.gui_settings_file_path):
            self.read_consts(self.gui_settings_file_path)
        # Layout for a constant size
        constant_height_layout = QVBoxLayout()
        constant_height_layout.addLayout(global_layout)
        constant_height_layout.addLayout(btn_layout)
        constant_height_layout.addStretch(1)
        constant_width_layout = QHBoxLayout(self)
        constant_width_layout.addLayout(constant_height_layout)
        constant_width_layout.addStretch(1)
    # NOTE(review): "regster" is a typo for "register", kept as-is because external
    # code may reference this method by name.
    def handle_regster_widget_event(self, sxconst):
        """Copy the registered value of *sxconst* back into its edit box."""
        sxconst.widget.setText(sxconst.register)
    def register_const_set(self):
        """Commit the edit-box values as the registered project constants.

        Propagates each registered value to every unlocked command token whose
        type matches a project constant, across all command categories, then
        saves the GUI settings to the project settings file.
        """
        # Loop through all project constant parameters
        for sxconst in self.sxconst_set.list:
            sxconst.register = sxconst.widget.text()
            sxconst.register_widget.setText("%s" % sxconst.register)
        # Loop through all command categories
        for sxcmd_category in self.sxcmd_category_list:
            # Loop through all commands of this category
            for sxcmd in sxcmd_category.cmd_list:
                # Loop through all command tokens of this command
                for cmd_token in sxcmd.token_list:
                    if not cmd_token.is_locked and cmd_token.type in self.sxconst_set.dict.keys():
                        sxconst = self.sxconst_set.dict[cmd_token.type]
                        cmd_token.restore = sxconst.register
                        cmd_token.restore_widget.setText("%s" % cmd_token.restore)
                        cmd_token.widget.setText(cmd_token.restore)
                        # print "MRK_DEBUG: %s, %s, %s, %s, %s, %s" % (sxcmd.name, sxcmd.subname, cmd_token.key_base, cmd_token.type, cmd_token.default, cmd_token.restore)
        # Save the current state of GUI settings
        if os.path.exists(SXLookFeelConst.project_dir) == False:
            os.mkdir(SXLookFeelConst.project_dir)
        self.write_consts(self.gui_settings_file_path)
    def write_consts(self, file_path_out):
        """Write the current project constant settings to *file_path_out*.

        The first line is a signature used by read_consts() for a consistency
        check; each following line holds one constant in the form
        "<key> label (registered value) == current value".
        """
        # "with" guarantees the handle is closed even if a write fails
        # (the original open/close pair leaked the handle on exceptions)
        with open(file_path_out, "w") as file_out:
            # Write script name for consistency check upon loading
            file_out.write("@@@@@ project settings gui settings - ")
            file_out.write(EMANVERSION + " (GITHUB: " + DATESTAMP +")" )
            file_out.write(" @@@@@ \n")
            # Loop through all project constant parameters
            for sxconst in self.sxconst_set.list:
                # The other type has only one line edit box
                val_str = str(sxconst.widget.text())
                file_out.write("<%s> %s (registered %s) == %s \n" % (sxconst.key, sxconst.label, sxconst.register, val_str))
    def read_consts(self, file_path_in):
        """Load project constant settings from *file_path_in* into the edit boxes.

        Warns (via QMessageBox) on a missing signature line, malformed entries,
        or unknown entry keys; recognized entries update the corresponding
        constant's edit box.
        """
        # "with" guarantees the handle is closed even if a malformed line
        # raises below (the original leaked the handle in that case)
        with open(file_path_in, "r") as file_in:
            # Check if this parameter file is for this sx script
            line_in = file_in.readline()
            if line_in.find("@@@@@ project settings gui settings") != -1:
                # loop through the rest of lines
                for line_in in file_in:
                    # Extract label (which should be left of "=="). Also strip the ending spaces
                    label_in = line_in.split("==")[0].strip()
                    # Extract value (which should be right of "=="). Also strip all spaces
                    val_str_in = line_in.split("==")[1].strip()
                    # Extract key_base of this command token
                    target_operator = "<"
                    item_tail = label_in.find(target_operator)
                    if item_tail != 0:
                        QMessageBox.warning(self, "Invalid Project Settings File Format", "Project settings entry should start from \"%s\" for entry key in line (%s). The format of this file might be corrupted. Please save the project settings file again." % (target_operator, line_in))
                    label_in = label_in[item_tail + len(target_operator):].strip() # Get the rest of line
                    target_operator = ">"
                    item_tail = label_in.find(target_operator)
                    if item_tail == -1:
                        QMessageBox.warning(self, "Invalid Project Settings File Format", "Project settings entry should have \"%s\" closing entry key in line (%s) The format of this file might be corrupted. Please save the project settings file again." % (target_operator, line_in))
                    key = label_in[0:item_tail]
                    # Get corresponding sxconst
                    if key not in self.sxconst_set.dict.keys():
                        QMessageBox.warning(self, "Invalid Project Settings File Format", "Invalid entry key for project settings \"%s\" is found in line (%s). This project settings file might be imcompatible with the current version. Please save the project settings file again." % (key, line_in))
                    sxconst = self.sxconst_set.dict[key]
                    sxconst.widget.setText(val_str_in)
            else:
                QMessageBox.warning(self, "Fail to load project settings", "The specified file is not project settings file.")
    def save_consts(self):
        """Prompt for a file path and save the current settings to it."""
        file_path_out = str(QFileDialog.getSaveFileName(self, "Save settings", SXLookFeelConst.file_dialog_dir, options = QFileDialog.DontUseNativeDialog))
        if file_path_out != "":
            self.write_consts(file_path_out)
    def load_consts(self):
        """Prompt for a file path and load settings from it."""
        file_path_in = str(QFileDialog.getOpenFileName(self, "Load settings", SXLookFeelConst.file_dialog_dir, options = QFileDialog.DontUseNativeDialog))
        if file_path_in != "":
            self.read_consts(file_path_in)
# ========================================================================================
# Layout of the information widget; owned by the main window
class SXInfoWidget(QWidget):
    """Scrollable information panel showing the sxgui_info.png image,
    framed by fixed-size spacer labels; owned by the main window.
    """
    def __init__(self, parent = None):
        super(SXInfoWidget, self).__init__(parent)
        self.setStyleSheet("background-color: {0}".format(SXLookFeelConst.default_bg_color_string))
        container = QWidget(self)
        # Resolve the info picture path and load it once to obtain its native size
        pic_name = '{0}sxgui_info.png'.format(get_image_directory())
        self.pixmap = QPixmap(pic_name)
        # Transparent widget that lives inside the scroll area
        scroll_widget = QWidget()
        scroll_widget.setStyleSheet('background-color: transparent')
        # Fixed-size spacer labels framing the picture (40px high above/below, 40px wide left/right)
        spacer_top = QLabel()
        spacer_top.setFixedHeight(40)
        spacer_bottom = QLabel()
        spacer_bottom.setFixedHeight(40)
        spacer_left = QLabel()
        spacer_left.setFixedWidth(40)
        spacer_right = QLabel()
        spacer_right.setFixedWidth(40)
        # QLabel pinned to the picture's native dimensions; the picture itself is
        # drawn via the border-image style rule
        self.label = QLabel()
        self.label.setFixedSize(self.pixmap.width(), self.pixmap.height())
        self.label.setStyleSheet('border-image: url("{0}"); background-color: transparent'.format(pic_name))
        # Horizontal row: left spacer | picture | right spacer
        row_layout = QHBoxLayout()
        row_layout.addWidget(spacer_left)
        row_layout.addWidget(self.label)
        row_layout.addWidget(spacer_right)
        # Vertical stack inside the scroll widget: top spacer / picture row / bottom spacer
        column_layout = QVBoxLayout(scroll_widget)
        column_layout.addWidget(spacer_top)
        column_layout.addLayout(row_layout)
        column_layout.addWidget(spacer_bottom)
        # Scroll area (always-on scroll bars) hosting the picture widget
        scroll_area = QScrollArea(container)
        scroll_area.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOn)
        scroll_area.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOn)
        scroll_area.setWidget(scroll_widget)
        scroll_area.setStyleSheet("background-color: {0}".format(SXLookFeelConst.sxcmd_tab_bg_color_string))
        container_layout = QHBoxLayout(container)
        container_layout.addWidget(scroll_area, stretch=1)
        outer_layout = QHBoxLayout(self)
        outer_layout.addWidget(container)
        outer_layout.setContentsMargins(0, 0, 0, 0)
# ========================================================================================
# Main Window (started by class SXApplication)
class SXMainWindow(QMainWindow): # class SXMainWindow(QWidget):
    def __init__(self, parent = None):
        """Assemble the SPHIRE main window.

        Builds the menu-item data (GUI info, project constant set, command
        categories), lays out a menu-button area on the left and a stacked
        layout of per-item widgets on the right, wires the button click
        handlers, restores previously saved command settings, and shows the
        start (splash) widget.
        """
        super(SXMainWindow, self).__init__(parent)
        # ><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><
        # class variables
        # ><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><
        self.sxinfo = None  # SXmenu_item describing the GUI information page
        self.sxconst_set = None  # SXconst_set of project-wide constant parameters
        self.sxcmd_category_list = None  # list of SXcmd_category objects
        self.cur_sxmenu_item = None  # menu item currently selected/shown
        self.sxmenu_item_widget_stacked_layout = None  # QStackedLayout holding the right-hand widgets
        # ><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><
        # --------------------------------------------------------------------------------
        # Construct menu items
        # --------------------------------------------------------------------------------
        self.construct_sxinfo() # Construct application information
        self.construct_sxconst_set() # Construct project constant set for project settings
        self.construct_sxcmd_category_list() # Construct list of categorised sxscript objects (extracted from associated wiki documents)
        # --------------------------------------------------------------------------------
        # Setup Window Layout
        # --------------------------------------------------------------------------------
        background_image_file_path = '{0}sxgui_background.png'.format(get_image_directory())
        # Central widget
        central_widget = QWidget(self)
        central_widget.setObjectName('central')
        # Object-name selector so only this widget (not its children) gets the background image
        central_widget.setStyleSheet(
            'QWidget#central {{background-image: url("{0}")}}'.format(background_image_file_path)
        )
        self.setCentralWidget(central_widget)
        # Layout for central widget
        central_layout = QHBoxLayout(central_widget)
        # NOTE(review): passing central_widget to the QHBoxLayout constructor already
        # installs the layout; this setLayout call looks redundant — confirm before removing.
        central_widget.setLayout(central_layout)
        # --------------------------------------------------------------------------------
        # Construct and add a widget for menu item button area (containing all menu item buttons)
        # --------------------------------------------------------------------------------
        sxmenu_item_btn_area_widget = SXMenuItemBtnAreaWidget(self.sxconst_set, self.sxcmd_category_list, self.sxinfo, central_widget)
        central_layout.addWidget(sxmenu_item_btn_area_widget)
        # --------------------------------------------------------------------------------
        # Construct and add widgets for menu item widget area (containing all menu item widgets)
        # --------------------------------------------------------------------------------
        # Stacked layout for sx menu item widgets area
        self.sxmenu_item_widget_stacked_layout = QStackedLayout()
        central_layout.addLayout(self.sxmenu_item_widget_stacked_layout, stretch = 1)
        # Construct and add widgets for sx command categories
        for sxcmd_category in self.sxcmd_category_list:
            # Create SXCmdCategoryWidget for this command category
            sxcmd_category.widget = SXCmdCategoryWidget(self.sxconst_set, sxcmd_category)
            self.sxmenu_item_widget_stacked_layout.addWidget(sxcmd_category.widget)
        # Construct and add a widget for project constants settings
        self.sxconst_set.widget = SXConstSetWidget(self.sxconst_set, self.sxcmd_category_list)
        self.sxmenu_item_widget_stacked_layout.addWidget(self.sxconst_set.widget)
        # Construct and add a widget for GUI application information
        self.sxinfo.widget = SXInfoWidget()
        self.sxmenu_item_widget_stacked_layout.addWidget(self.sxinfo.widget)
        # --------------------------------------------------------------------------------
        # Set up event handler of all menu item buttons
        # --------------------------------------------------------------------------------
        # partial binds the clicked menu item so one handler serves every button
        for sxcmd_category in self.sxcmd_category_list:
            sxcmd_category.btn.clicked.connect(partial(self.handle_sxmenu_item_btn_event, sxcmd_category))
        self.sxconst_set.btn.clicked.connect(partial(self.handle_sxmenu_item_btn_event, self.sxconst_set))
        self.sxinfo.btn.clicked.connect(partial(self.handle_sxmenu_item_btn_event, self.sxinfo))
        # --------------------------------------------------------------------------------
        # Register project constant parameter settings upon initialization
        # --------------------------------------------------------------------------------
        self.sxconst_set.widget.register_const_set()
        # --------------------------------------------------------------------------------
        # Load the previously saved parameter setting of all sx commands
        # Override the registration of project constant parameter settings with the previously-saved one
        # --------------------------------------------------------------------------------
        for sxcmd_category in self.sxcmd_category_list:
            sxcmd_category.widget.load_previous_session()
        # --------------------------------------------------------------------------------
        # Start widget
        # --------------------------------------------------------------------------------
        # NOTE(review): this section uses the QtGui-namespaced classes while the rest of
        # the method uses bare names — presumably both import styles are in scope; confirm.
        start_widget = QtGui.QWidget()
        logo_container = QtGui.QWidget()
        layout_start_widget = QtGui.QHBoxLayout()
        layout_logo_container = QtGui.QVBoxLayout()
        logo_container.setStyleSheet('border-image: url("{0}sxgui_pictograph_info.png")'.format(get_image_directory()))
        logo_container.setFixedSize(100, 100)
        layout_start_widget.setContentsMargins(0, 0, 0, 20)
        # Stretch above the logo pushes it to the bottom; stretch after the
        # container pushes it to the left of the start page.
        layout_logo_container.addStretch(1)
        layout_logo_container.addWidget(logo_container)
        layout_start_widget.addLayout(layout_logo_container)
        layout_start_widget.addStretch(1)
        start_widget.setLayout(layout_start_widget)
        self.sxmenu_item_widget_stacked_layout.addWidget(start_widget)
        # --------------------------------------------------------------------------------
        # Display application information upon startup
        # --------------------------------------------------------------------------------
        # NOTE(review): despite the comment above, the widget shown first is the
        # start/splash widget, not the info widget — confirm intended behavior.
        self.sxmenu_item_widget_stacked_layout.setCurrentWidget(start_widget)
        # --------------------------------------------------------------------------------
        # Get focus to main window
        # --------------------------------------------------------------------------------
        self.setFocus()
def construct_sxinfo(self):
sxinfo = SXmenu_item(); sxinfo.name = "GUI Information"; sxinfo.label = "GUI Appliation Information"; sxinfo.short_info = "DUMMY STRING"
# Store GUI application information as a class data member
self.sxinfo = sxinfo
def construct_sxconst_set(self):
sxconst_set = SXconst_set(); sxconst_set.name = "sxc_project"; sxconst_set.label = "Project Settings"; sxconst_set.short_info = "Set constant parameter values for this project. These constants will be used as default values of associated arguments and options in command settings. However, the project settings here are not required to run commands."
sxconst = SXconst(); sxconst.key = "protein"; sxconst.label = "Protein name"; sxconst.help = "a valid string for file names on your OS."; sxconst.register = "MY_PROTEIN"; sxconst.type = "string"; sxconst_set.list.append(sxconst); sxconst_set.dict[sxconst.key] = sxconst
sxconst = SXconst(); sxconst.key = "apix"; sxconst.label = "Micrograph pixel size [A]"; sxconst.help = ""; sxconst.register = "1.0"; sxconst.type = "float"; sxconst_set.list.append(sxconst); sxconst_set.dict[sxconst.key] = sxconst
sxconst = SXconst(); sxconst.key = "ctfwin"; sxconst.label = "CTF window size [pixels]"; sxconst.help = "it should be slightly larger than particle box size"; sxconst.register = "512"; sxconst.type = "int"; sxconst_set.list.append(sxconst); sxconst_set.dict[sxconst.key] = sxconst
sxconst = SXconst(); sxconst.key = "box"; sxconst.label = "Particle box size [pixels]" ; sxconst.help = ""; sxconst.register = "-1"; sxconst.type = "int"; sxconst_set.list.append(sxconst); sxconst_set.dict[sxconst.key] = sxconst
sxconst = SXconst(); sxconst.key = "radius"; sxconst.label = "Protein particle radius [pixels]"; sxconst.help = ""; sxconst.register = "-1"; sxconst.type = "int"; sxconst_set.list.append(sxconst); sxconst_set.dict[sxconst.key] = sxconst
sxconst = SXconst(); sxconst.key = "sym"; sxconst.label = "Point-group symmetry"; sxconst.help = "e.g. c1, c4, d5"; sxconst.register = "c1"; sxconst.type = "string"; sxconst_set.list.append(sxconst); sxconst_set.dict[sxconst.key] = sxconst
sxconst = SXconst(); sxconst.key = "mass"; sxconst.label = "Protein molecular mass [kDa]"; sxconst.help = ""; sxconst.register = "-1.0"; sxconst.type = "float"; sxconst_set.list.append(sxconst); sxconst_set.dict[sxconst.key] = sxconst
sxconst = SXconst(); sxconst.key = "config"; sxconst.label = "Imaging configurations"; sxconst.help = "a free-style string for your record. please use it to describe the set of imaging configurations used in this project (e.g. types of microscope, detector, enegy filter, abbration corrector, phase plate, and etc."; sxconst.register = "MY_MICROSCOPE"; sxconst.type = "int"; sxconst_set.list.append(sxconst); sxconst_set.dict[sxconst.key] = sxconst
# Store the project constant parameter set as a class data member
self.sxconst_set = sxconst_set
def construct_sxcmd_category_list(self):
sxcmd_category_list = []
sxcmd_list = [] # Used only within this function
sxcmd_category_dict = {} # Used only within this function
# Actual configurations of all sx command categories and sx commands are inserted into the following section by wikiparser.py
# as sxcmd_category_list and sxcmd_list
# @@@@@ START_INSERTION @@@@@
sxcmd_category = SXcmd_category(); sxcmd_category.name = "sxc_movie"; sxcmd_category.label = "Movie Micrograph"; sxcmd_category.short_info = "movie frame alignemnt, and drift assessment"
sxcmd_category_list.append(sxcmd_category)
sxcmd_category = SXcmd_category(); sxcmd_category.name = "sxc_cter"; sxcmd_category.label = "CTF"; sxcmd_category.short_info = "ctf estinatim, and ctf assessment"
sxcmd_category_list.append(sxcmd_category)
sxcmd_category = SXcmd_category(); sxcmd_category.name = "sxc_window"; sxcmd_category.label = "Particle Stack"; sxcmd_category.short_info = "particle picking, and particle windowing"
sxcmd_category_list.append(sxcmd_category)
sxcmd_category = SXcmd_category(); sxcmd_category.name = "sxc_isac"; sxcmd_category.label = "2D Clustering"; sxcmd_category.short_info = "2d clustering with isac, and post-processing"
sxcmd_category_list.append(sxcmd_category)
sxcmd_category = SXcmd_category(); sxcmd_category.name = "sxc_viper"; sxcmd_category.label = "Initial 3D Modeling"; sxcmd_category.short_info = "initial 3d modeling with viper/rviper"
sxcmd_category_list.append(sxcmd_category)
sxcmd_category = SXcmd_category(); sxcmd_category.name = "sxc_meridien"; sxcmd_category.label = "3D Refinement"; sxcmd_category.short_info = "3d refinement and post-processing"
sxcmd_category_list.append(sxcmd_category)
sxcmd_category = SXcmd_category(); sxcmd_category.name = "sxc_sort3d"; sxcmd_category.label = "3D Clustering"; sxcmd_category.short_info = "3d variability, and 3d clustering protocol I & II"
sxcmd_category_list.append(sxcmd_category)
sxcmd_category = SXcmd_category(); sxcmd_category.name = "sxc_localres"; sxcmd_category.label = "Local Resolution"; sxcmd_category.short_info = "local resolution, and local filter"
sxcmd_category_list.append(sxcmd_category)
sxcmd_category = SXcmd_category(); sxcmd_category.name = "sxc_utilities"; sxcmd_category.label = "Utilities"; sxcmd_category.short_info = "miscellaneous utlitity commands"
sxcmd_category_list.append(sxcmd_category)
sxcmd = SXcmd(); sxcmd.name = "sxgui_unblur"; sxcmd.subname = ""; sxcmd.mode = ""; sxcmd.label = "Drift Assessment"; sxcmd.short_info = "Assess micrographs based on drift estimation produced by Unblur."; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_movie"; sxcmd.role = "sxr_pipe"; sxcmd.is_submittable = False
token = SXcmd_token(); token.key_base = "inputfile"; token.key_prefix = ""; token.label = "Shift files"; token.help = "A wild card * can be used to process multiple shift files. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "any_file"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "input_shift_list_file"; token.key_prefix = ""; token.label = "Input shift list file"; token.help = "Extension of input shift list file must be '.txt'. If this is not provided, all files matched with the micrograph name pattern will be processed. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "txt"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
sxcmd = SXcmd(); sxcmd.name = "e2display"; sxcmd.subname = ""; sxcmd.mode = ""; sxcmd.label = "Display Data"; sxcmd.short_info = "Displays images, volumes, or 1D plots."; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_movie"; sxcmd.role = "sxr_util"; sxcmd.is_submittable = False
token = SXcmd_token(); token.key_base = "input_data_list"; token.key_prefix = ""; token.label = "Input files"; token.help = "List of input images, volumes, plots. Wild cards (e.g *) can be used to select a list of files. Not recommended when the list is too large. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "any_file_list"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "singleimage"; token.key_prefix = "--"; token.label = "Single image view"; token.help = "Displays a stack in a single image view: "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "fullrange"; token.key_prefix = "--"; token.label = "Use full range of pixel values"; token.help = "Instead of default auto-contrast, use full range of pixel values for the display of particles stacks and 2D images, "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "verbose"; token.key_prefix = "--"; token.label = "Verbose"; token.help = "Accepted values 0-9. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0"; token.restore = "0"; token.type = "int"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
sxcmd = SXcmd(); sxcmd.name = "sxunblur"; sxcmd.subname = ""; sxcmd.mode = ""; sxcmd.label = "Micrograph Movie Alignment"; sxcmd.short_info = "Align frames of micrograph movies with Unblur."; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_movie"; sxcmd.role = "sxr_util"; sxcmd.is_submittable = True
token = SXcmd_token(); token.key_base = "unblur_path"; token.key_prefix = ""; token.label = "unblur executable path"; token.help = "Specify the file path of unblur executable. (This argument is specific to SPHIRE, and not directly used by unblur and summovie executables.) "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "exe"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "input_micrograph_pattern"; token.key_prefix = ""; token.label = "Input micrograph path pattern"; token.help = "Specify path pattern of input micrographs with a wild card (*). Use the wild card to indicate the place of variable part of the file names (e.g. serial number, time stamp, and etc). The path pattern must be enclosed by single quotes (') or double quotes ('). (Note: sxgui.py automatically adds single quotes (')). bdb files can not be selected as input micrographs. (This argument is specific to SPHIRE, and not directly used by unblur and summovie executables.) "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "any_image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "output_directory"; token.key_prefix = ""; token.label = "Output directory"; token.help = "The results will be written here. This directory will be created automatically and it must not exist previously. (This argument is specific to SPHIRE, and not directly used by unblur and summovie executables.) "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "summovie_path"; token.key_prefix = "--"; token.label = "summovie executable path"; token.help = "Specify the file path of summovie executable. (This option is specific to SPHIRE, and not directly used by unblur and summovie executables.) "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "exe"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "selection_list"; token.key_prefix = "--"; token.label = "Micrograph selection file"; token.help = "Specify a name of micrograph selection list text file. The file extension must be '.txt'. If this is not provided, all files matched with the micrograph name pattern will be processed. (This option is specific to SPHIRE, and not directly used by unblur and summovie executables.) "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "any_micrograph"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "nr_frames"; token.key_prefix = "--"; token.label = "Number of movie frames"; token.help = "The number of movie frames in each input micrograph. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "3"; token.restore = "3"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "pixel_size"; token.key_prefix = "--"; token.label = "Pixel size [A]"; token.help = "The pixel size of input micrographs. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "apix"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "voltage"; token.key_prefix = "--"; token.label = "Microscope voltage [kV]"; token.help = "The acceleration voltage of microscope used for imaging. Will be ignored when skip_dose_filter option is used. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "300.0"; token.restore = "300.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "exposure_per_frame"; token.key_prefix = "--"; token.label = "Per frame exposure [e/A^2]"; token.help = "The electron dose per frame in e/A^2. Will be ignored when skip_dose_filter option is used. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "2.0"; token.restore = "2.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "pre_exposure"; token.key_prefix = "--"; token.label = "Pre-exposure [e/A^2]"; token.help = "The electron does in e/A^2 used for exposure prior to imaging. Will be ignored when skip_dose_filter option is used. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "0.0"; token.restore = "0.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "nr_threads"; token.key_prefix = "--"; token.label = "Number of threads"; token.help = "The number of threads unblur can use. The higher the faster, but it requires larger memory. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "save_frames"; token.key_prefix = "--"; token.label = "Save aligned movie frames"; token.help = "Save aligned movie frames. This option slows down the process. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "skip_dose_filter"; token.key_prefix = "--"; token.label = "Apply dose filter"; token.help = "Indicate if dose filter should be applied or not. With this option, voltage, exposure per frame, and pre exposure will be ignored. By default, apply dose filter. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "expert_mode"; token.key_prefix = "--"; token.label = "Use expert mode"; token.help = "Requires initial shift, shift radius, b-factor, fourier_vertical, fourier_horizontal, shift threshold, iterations, restore noise, and verbosity options. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "shift_initial"; token.key_prefix = "--"; token.label = "Minimum shift for initial search [A]"; token.help = "Effective only when expert_mode option is used. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "2.0"; token.restore = "2.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "shift_radius"; token.key_prefix = "--"; token.label = "Outer radius shift limit [A]"; token.help = "Effective only when expert_mode option is used. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "200.0"; token.restore = "200.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "b_factor"; token.key_prefix = "--"; token.label = "Apply B-factor to images [A^2]"; token.help = "Effective only when expert_mode option is used. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "1500.0"; token.restore = "1500.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "fourier_vertical"; token.key_prefix = "--"; token.label = "Vertical Fourier central mask size"; token.help = "The half-width of central vertical line of Fourier mask. Effective only when expert_mode option is used. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "fourier_horizontal"; token.key_prefix = "--"; token.label = "Horizontal Fourier central mask size"; token.help = "The half-width of central horizontal line of Fourier mask. Effective only when expert_mode option is used. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "shift_threshold"; token.key_prefix = "--"; token.label = "Termination shift threshold"; token.help = "Effective only when expert_mode option is used. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0.1"; token.restore = "0.1"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "iterations"; token.key_prefix = "--"; token.label = "Maximum iterations"; token.help = "Effective only when expert_mode option is used. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "10"; token.restore = "10"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "dont_restore_noise"; token.key_prefix = "--"; token.label = "Restore noise power"; token.help = "Indicate if noise power should be restored or not. By default, restore noise power. Effective only when expert_mode option is used. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "verbose"; token.key_prefix = "--"; token.label = "Verbose"; token.help = "Effective only when expert_mode option is used. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
sxcmd = SXcmd(); sxcmd.name = "sxsummovie"; sxcmd.subname = ""; sxcmd.mode = ""; sxcmd.label = "Micrograph Movie Summation"; sxcmd.short_info = "Sum micrograph movies with SumMovie."; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_movie"; sxcmd.role = "sxr_util"; sxcmd.is_submittable = True
token = SXcmd_token(); token.key_base = "summovie_path"; token.key_prefix = ""; token.label = "summovie executable path"; token.help = "Specify the file path of summovie executable. (This argument is specific to SPHIRE, and not directly used by summovie executable.) "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "exe"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "input_micrograph_pattern"; token.key_prefix = ""; token.label = "Input micrograph path pattern"; token.help = "Specify path pattern of input micrographs with a wild card (*). Use the wild card to indicate the place of variable part of the file names (e.g. serial number, time stamp, and etc). The path pattern must be enclosed by single quotes (') or double quotes ('). (Note: sxgui.py automatically adds single quotes (')). bdb files can not be selected as input micrographs. (This argument is specific to SPHIRE, and not directly used by summovie executable.) "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "any_image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "input_shift_pattern"; token.key_prefix = ""; token.label = "Input shift path pattern"; token.help = "Specify path pattern of input shift files with a wild card (*). Use the wild card to indicate the place of variable part of the file names (e.g. serial number, time stamp, and etc). The path pattern must be enclosed by single quotes (') or double quotes ('). (Note: sxgui.py automatically adds single quotes (')). The substring at the variable part must be same between the associated pair of input micrograph and shift file. (This argument is specific to SPHIRE, and not directly used by summovie executable.) "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "parameters"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "output_directory"; token.key_prefix = ""; token.label = "Output directory"; token.help = "The results will be written here. This directory will be created automatically and it must not exist previously. (This argument is specific to SPHIRE, and not directly used by summovie executable.) "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "selection_list"; token.key_prefix = "--"; token.label = "Micrograph selection file"; token.help = "Specify a name of micrograph selection list text file. The file extension must be '.txt'. If this is not provided, all files matched with the micrograph name pattern will be processed. (This option is specific to SPHIRE, and not directly used by summovie executable.) "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "any_micrograph"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "nr_frames"; token.key_prefix = "--"; token.label = "Number of movie frames"; token.help = "The number of movie frames in each input micrograph. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "3"; token.restore = "3"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "first"; token.key_prefix = "--"; token.label = "First movie frame"; token.help = "First movie frame for summing. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "last"; token.key_prefix = "--"; token.label = "Last movie frame"; token.help = "Last movie frame for summing. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "-1"; token.restore = "-1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "pixel_size"; token.key_prefix = "--"; token.label = "Pixel size [A]"; token.help = "The pixel size of input micrographs. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "apix"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "nr_threads"; token.key_prefix = "--"; token.label = "Number of threads"; token.help = "The number of threads summovie can use. The higher the faster, but it requires larger memory. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "apply_dose_filter"; token.key_prefix = "--"; token.label = "Apply dose filter"; token.help = "Requires voltage, exposure per frame, and pre exposure options. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "voltage"; token.key_prefix = "--"; token.label = "Microscope voltage [kV]"; token.help = "The acceleration voltage of microscope used for imaging. Effective only when apply_dose_filter option is used. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "300.0"; token.restore = "300.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "exposure_per_frame"; token.key_prefix = "--"; token.label = "Per frame exposure [e/A^2]"; token.help = "The electron dose per frame in e/A^2. Effective only when apply_dose_filter option is used. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "2.0"; token.restore = "2.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "pre_exposure"; token.key_prefix = "--"; token.label = "Pre-exposure [e/A^2]"; token.help = "The electron does in e/A^2 used for exposure prior to imaging. Effective only when apply_dose_filter option is used. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0.0"; token.restore = "0.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "dont_restore_noise"; token.key_prefix = "--"; token.label = "Restore noise power"; token.help = "Indicate if noise power should be restored or not. By default, restore noise power. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
sxcmd = SXcmd(); sxcmd.name = "sxcter"; sxcmd.subname = ""; sxcmd.mode = ""; sxcmd.label = "CTF Estimation"; sxcmd.short_info = "Automated estimation of CTF parameters with error assessment."; sxcmd.mpi_support = True; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_cter"; sxcmd.role = "sxr_pipe"; sxcmd.is_submittable = True
token = SXcmd_token(); token.key_base = "input_image_path"; token.key_prefix = ""; token.label = "Input micrograph path pattern"; token.help = "Specify input micrographs path pattern with a wild card (*) for any of Micrograph Modes. Images of BDB format can not be used as input micrographs. As an advanced option, a particle stack file path can also be supplied here when using --stack_mode. However, Stack Mode is not supported by sxgui. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "any_image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "output_directory"; token.key_prefix = ""; token.label = "Output directory"; token.help = "The CTF parameters (partres file), rotationally averaged power spectra (rotinf), and micrograph thumbnails (thumb files) will be written here. This directory will be created automatically and it must not exist previously. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "selection_list"; token.key_prefix = "--"; token.label = "Micrograph selection file"; token.help = "Specify path of a micrograph selection list text file for Selected Micrographs Mode. The file extension must be '.txt'. Alternatively, the file name of a single micrograph can be specified for Single Micrograph Mode. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "any_micrograph"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "wn"; token.key_prefix = "--"; token.label = "CTF window size [Pixels]"; token.help = "The size should be slightly larger than particle box size. This will be ignored in Stack Mode. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "512"; token.restore = "512"; token.type = "ctfwin"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "apix"; token.key_prefix = "--"; token.label = "Pixel size [A]"; token.help = "The pixel size of input micrograph(s) or images in input particle stack. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "-1.0"; token.restore = "-1.0"; token.type = "apix"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "Cs"; token.key_prefix = "--"; token.label = "Microscope spherical aberration (Cs) [mm]"; token.help = "The spherical aberration (Cs) of microscope used for imaging. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "2.0"; token.restore = "2.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "voltage"; token.key_prefix = "--"; token.label = "Microscope voltage [kV]"; token.help = "The acceleration voltage of microscope used for imaging. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "300.0"; token.restore = "300.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "ac"; token.key_prefix = "--"; token.label = "Amplitude contrast [%]"; token.help = "The typical amplitude contrast is in the range of 7% - 14%. The value mainly depends on the thickness of the ice embedding the particles. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "10.0"; token.restore = "10.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "f_start"; token.key_prefix = "--"; token.label = "Lowest frequency [1/A]"; token.help = "Lowest frequency to be considered in the CTF estimation. Determined automatically by default. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "-1.0"; token.restore = "-1.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "f_stop"; token.key_prefix = "--"; token.label = "Highest frequency [1/A]"; token.help = "Highest frequency to be considered in the CTF estimation. Determined automatically by default. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "-1.0"; token.restore = "-1.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "kboot"; token.key_prefix = "--"; token.label = "Number of CTF estimates per micrograph"; token.help = "Used for error assessment. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "16"; token.restore = "16"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "overlap_x"; token.key_prefix = "--"; token.label = "X overlap [%]"; token.help = "Overlap between the windows in the x direction. This will be ignored in Stack Mode. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "50"; token.restore = "50"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "overlap_y"; token.key_prefix = "--"; token.label = "Y overlap [%]"; token.help = "Overlap between the windows in the y direction. This will be ignored in Stack Mode. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "50"; token.restore = "50"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "edge_x"; token.key_prefix = "--"; token.label = "Edge x [pixels]"; token.help = "Defines the edge of the tiling area in the x direction. Normally it does not need to be modified. This will be ignored in Stack Mode. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0"; token.restore = "0"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "edge_y"; token.key_prefix = "--"; token.label = "Edge y [pixels]"; token.help = "Defines the edge of the tiling area in the y direction. Normally it does not need to be modified. This will be ignored in Stack Mode. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0"; token.restore = "0"; token.type = "int"; sxcmd.token_list.append(token)
# Tail of a command definition begun above this excerpt (presumably sxcter —
# TODO confirm against the full file): remaining advanced options, including
# experimental Volta Phase Plate (--vpp) defocus/phase search parameters,
# followed by registration of the finished command in sxcmd_list.
token = SXcmd_token(); token.key_base = "check_consistency"; token.key_prefix = "--"; token.label = "Check consistency of inputs"; token.help = "Create a text file containing the list of inconsistent Micrograph ID entries (i.e. inconsist_mic_list_file.txt). "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "debug_mode"; token.key_prefix = "--"; token.label = "Enable debug mode"; token.help = "Print out debug information. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "vpp"; token.key_prefix = "--"; token.label = "Estimate amplitude contrast"; token.help = "UNDER DEVELOPMENT! Also estimate phase shift as amplitude contrast. Use this option to estimate phase shift induced by Volta Phase Plate imaging. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "defocus_min"; token.key_prefix = "--"; token.label = "Minimum defocus search [um]"; token.help = "UNDER DEVELOPMENT! This is applicable only with --vpp option. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0.3"; token.restore = "0.3"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "defocus_max"; token.key_prefix = "--"; token.label = "Maximum defocus search [um]"; token.help = "UNDER DEVELOPMENT! This is applicable only with --vpp option. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "9.0"; token.restore = "9.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "defocus_step"; token.key_prefix = "--"; token.label = "Defocus search step [um]"; token.help = "UNDER DEVELOPMENT! This is applicable only with --vpp option. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0.1"; token.restore = "0.1"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "phase_min"; token.key_prefix = "--"; token.label = "Minimum phase search [degrees]"; token.help = "UNDER DEVELOPMENT! This is applicable only with --vpp option. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "5.0"; token.restore = "5.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "phase_max"; token.key_prefix = "--"; token.label = "Maximum phase search [degrees]"; token.help = "UNDER DEVELOPMENT! This is applicable only with --vpp option. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "175.0"; token.restore = "175.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "phase_step"; token.key_prefix = "--"; token.label = "Phase search step [degrees]"; token.help = "UNDER DEVELOPMENT! This is applicable only with --vpp option. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "5.0"; token.restore = "5.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "pap"; token.key_prefix = "--"; token.label = "Use amplitude "; token.help = "UNDER DEVELOPMENT! Use amplitude for CTF parameter search instead of power spectrum. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
# Command definition complete — register it in the global command list.
sxcmd_list.append(sxcmd)
# sxgui_cter: interactive CTF assessment GUI (not MPI-capable, not submittable
# to a queue); takes the partres.txt file produced by sxcter as its one input.
sxcmd = SXcmd(); sxcmd.name = "sxgui_cter"; sxcmd.subname = ""; sxcmd.mode = ""; sxcmd.label = "CTF Assessment"; sxcmd.short_info = "GUI tool to assess and sort micrographs using sxcter results."; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_cter"; sxcmd.role = "sxr_pipe"; sxcmd.is_submittable = False
token = SXcmd_token(); token.key_base = "cter_ctf_file"; token.key_prefix = ""; token.label = "File containing CTF parameters"; token.help = "This file is produced by sxcter and normally called partres.txt "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "txt"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# e2display (CTER category, utility role): generic EMAN2 viewer for images,
# volumes and 1D plots. The same command is registered again under other
# categories elsewhere in this file.
sxcmd = SXcmd(); sxcmd.name = "e2display"; sxcmd.subname = ""; sxcmd.mode = ""; sxcmd.label = "Display Data"; sxcmd.short_info = "Displays images, volumes, or 1D plots."; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_cter"; sxcmd.role = "sxr_util"; sxcmd.is_submittable = False
token = SXcmd_token(); token.key_base = "input_data_list"; token.key_prefix = ""; token.label = "Input files"; token.help = "List of input images, volumes, plots. Wild cards (e.g *) can be used to select a list of files. Not recommended when the list is too large. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "any_file_list"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "singleimage"; token.key_prefix = "--"; token.label = "Single image view"; token.help = "Displays a stack in a single image view: "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "fullrange"; token.key_prefix = "--"; token.label = "Use full range of pixel values"; token.help = "Instead of default auto-contrast, use full range of pixel values for the display of particles stacks and 2D images, "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "verbose"; token.key_prefix = "--"; token.label = "Verbose"; token.help = "Accepted values 0-9. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0"; token.restore = "0"; token.type = "int"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# e2boxer_old (window category): interactive/automatic particle picking to
# produce per-micrograph coordinate files consumed later by sxwindow.
sxcmd = SXcmd(); sxcmd.name = "e2boxer_old"; sxcmd.subname = ""; sxcmd.mode = ""; sxcmd.label = "Particle Coordinates"; sxcmd.short_info = "Generate files containing particle coordinates for all input micrographs by picking particles manual and/or automatically."; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_window"; sxcmd.role = "sxr_pipe"; sxcmd.is_submittable = False
token = SXcmd_token(); token.key_base = "input_micrograph_list"; token.key_prefix = ""; token.label = "Input micrographs"; token.help = "Wild cards (e.g. *) can be used to specify a list of micrographs. Not recommended if the number is very large. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "any_image_list"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "boxsize"; token.key_prefix = "--"; token.label = "Box size [Pixels]"; token.help = ""; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "-1"; token.restore = "-1"; token.type = "box"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "invert"; token.key_prefix = "--"; token.label = "Invert contrast"; token.help = ""; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "verbose"; token.key_prefix = "--"; token.label = "Verbose"; token.help = "Accepted values 0-9. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0"; token.restore = "0"; token.type = "int"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# sxwindow (window category, MPI-capable): extract (window) particle images
# from micrographs using picked coordinates and CTF parameters.
# Fix over generated text: corrected the misspellings "paramters"/"paramers"
# -> "parameters" in the user-facing help strings of the limit_ctf,
# astigmatism_error and resample_ratio tokens. No behavioral fields changed.
sxcmd = SXcmd(); sxcmd.name = "sxwindow"; sxcmd.subname = ""; sxcmd.mode = ""; sxcmd.label = "Particle Extraction"; sxcmd.short_info = "Window particles from micrographs using the particle coordinates."; sxcmd.mpi_support = True; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_window"; sxcmd.role = "sxr_pipe"; sxcmd.is_submittable = True
token = SXcmd_token(); token.key_base = "input_micrograph_pattern"; token.key_prefix = ""; token.label = "Input micrograph path pattern"; token.help = "Specify path pattern of input micrographs with a wild card (*). Use the wild card to indicate the place of variable part of the file names (e.g. serial number, time stamp, and etc). The path pattern must be enclosed by single quotes (') or double quotes ('). (Note: sxgui.py automatically adds single quotes (')). The substring at the variable part must be same between the associated pair of input micrograph and coordinates file. bdb files can not be selected as input micrographs. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "any_image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "input_coordinates_pattern"; token.key_prefix = ""; token.label = "Input coordinates path pattern"; token.help = "Specify path pattern of input coordinates files with a wild card (*). Use the wild card to indicate the place of variable part of the file names (e.g. serial number, time stamp, and etc). The path pattern must be enclosed by single quotes (') or double quotes ('). (Note: sxgui.py automatically adds single quotes (')). The substring at the variable part must be same between the associated pair of input micrograph and coordinates file. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "parameters"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "input_ctf_params_source"; token.key_prefix = ""; token.label = "CTF parameters source"; token.help = "Specify the file produced by sxcter and normally called partres.txt for cryo data. For negative staining data, enter pixel size [A/Pixels]. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "parameters"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "output_directory"; token.key_prefix = ""; token.label = "Output directory"; token.help = "The results will be written here. This directory will be created automatically and it must not exist previously. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "selection_list"; token.key_prefix = "--"; token.label = "Micrograph selection file"; token.help = "Specify a name of micrograph selection list text file for Selected Micrographs Mode. The file extension must be '.txt'. Alternatively, the file name of a single micrograph can be specified for Single Micrograph Mode. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "any_micrograph"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "coordinates_format"; token.key_prefix = "--"; token.label = "Coordinate file format"; token.help = "Allowed values are 'sparx', 'eman1', 'eman2', or 'spider'. The sparx, eman2, and spider formats use the particle center as coordinates. The eman1 format uses the lower left corner of the box as coordinates. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "eman1"; token.restore = "eman1"; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "box_size"; token.key_prefix = "--"; token.label = "Particle box size [Pixels]"; token.help = "The x and y dimensions of square area to be windowed. The box size after resampling is assumed when resample_ratio < 1.0. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "256"; token.restore = "256"; token.type = "box"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "skip_invert"; token.key_prefix = "--"; token.label = "Invert image contrast"; token.help = "Indicate if image contrast should be inverted or not. Do not invert for negative staining data. By default, the image contrast will be inverted for cryo data. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "limit_ctf"; token.key_prefix = "--"; token.label = "Use CTF limit filter"; token.help = "Frequencies where CTF oscillations can not be properly modeled with the resampled pixel size will be discarded in the images with the appropriate low-pass filter. This has no effects when the CTER CTF File is not specified by the CTF parameters source argument. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "astigmatism_error"; token.key_prefix = "--"; token.label = "Astigmatism error limit [Degrees]"; token.help = "Set astigmatism to zero for all micrographs where the angular error computed by sxcter is larger than the desired value. This has no effects when the CTER CTF File is not specified by the CTF parameters source argument. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "360.0"; token.restore = "360.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "resample_ratio"; token.key_prefix = "--"; token.label = "Image size reduction factor (<1)"; token.help = "Use a value between 0.0 and 1.0 (excluding 0.0). The new pixel size will be automatically recalculated and stored in CTF parameters when resample_ratio < 1.0 is used. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "1.0"; token.restore = "1.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "check_consistency"; token.key_prefix = "--"; token.label = "Check consistency of inputs"; token.help = "Create a text file containing the list of inconsistent Micrograph ID entries (i.e. inconsist_mic_list_file.txt). "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# e2bdb --makevstack (window category): build a virtual BDB stack from the
# per-micrograph stack subsets produced by sxwindow.
# Fix: the input_bdb_stack_pattern token set token.is_locked = None, unlike
# every other token in this file which uses the boolean False. Downstream
# code treats is_locked as a flag, so None/False are both falsy, but the
# inconsistent sentinel is normalized to False here.
sxcmd = SXcmd(); sxcmd.name = "e2bdb"; sxcmd.subname = ""; sxcmd.mode = "makevstack"; sxcmd.label = "Particle Stack"; sxcmd.short_info = "Make a 'virtual' BDB image stack with the specified name from one or more other stacks. "; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_window"; sxcmd.role = "sxr_pipe"; sxcmd.is_submittable = True
token = SXcmd_token(); token.key_base = "makevstack"; token.key_prefix = "--"; token.label = "Output virtual image stack"; token.help = "Make a 'virtual' BDB image stack with the specified name from one or more other stacks. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "input_bdb_stack_pattern"; token.key_prefix = ""; token.label = "Input BDB image stack pattern"; token.help = "Specify file path pattern of stack subsets created in particle extraction using a wild card /'*/' (e.g. /'//sxwindow_output_dir//*/'). The stack subsets are located in the sxwindow output directory."; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "any_directory"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# e2display (window category, utility role): same viewer command as the
# sxc_cter registration above, re-registered under the window category so it
# appears in that GUI section as well.
sxcmd = SXcmd(); sxcmd.name = "e2display"; sxcmd.subname = ""; sxcmd.mode = ""; sxcmd.label = "Display Data"; sxcmd.short_info = "Displays images, volumes, or 1D plots."; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_window"; sxcmd.role = "sxr_util"; sxcmd.is_submittable = False
token = SXcmd_token(); token.key_base = "input_data_list"; token.key_prefix = ""; token.label = "Input files"; token.help = "List of input images, volumes, plots. Wild cards (e.g *) can be used to select a list of files. Not recommended when the list is too large. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "any_file_list"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "singleimage"; token.key_prefix = "--"; token.label = "Single image view"; token.help = "Displays a stack in a single image view: "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "fullrange"; token.key_prefix = "--"; token.label = "Use full range of pixel values"; token.help = "Instead of default auto-contrast, use full range of pixel values for the display of particles stacks and 2D images, "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "verbose"; token.key_prefix = "--"; token.label = "Verbose"; token.help = "Accepted values 0-9. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0"; token.restore = "0"; token.type = "int"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# sxisac2 (isac category, MPI-capable): Iterative Stable Alignment and
# Clustering of a 2D particle stack. Main options first, then advanced
# alignment/filter/stability tuning parameters.
sxcmd = SXcmd(); sxcmd.name = "sxisac2"; sxcmd.subname = ""; sxcmd.mode = ""; sxcmd.label = "ISAC - 2D Clustering"; sxcmd.short_info = "Iterative Stable Alignment and Clustering (ISAC) of a 2D image stack."; sxcmd.mpi_support = True; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_isac"; sxcmd.role = "sxr_pipe"; sxcmd.is_submittable = True
token = SXcmd_token(); token.key_base = "stack_file"; token.key_prefix = ""; token.label = "Input image stack"; token.help = "The images must to be square (''nx''=''ny''). The stack can be either in bdb or hdf format. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "output_directory"; token.key_prefix = ""; token.label = "Output directory"; token.help = "The directory will be automatically created and the results will be written here. If the directory already exists, results will be written there, possibly overwriting previous runs. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "radius"; token.key_prefix = "--"; token.label = "Particle radius [Pixels]"; token.help = "Radius of the particle (pixels). There is no default value and so a sensible number has to be provided. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "radius"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "img_per_grp"; token.key_prefix = "--"; token.label = "Images per class"; token.help = "Number of images per class in an ideal situation. In practice, it defines the maximum size of the classes or the number of classes K= [total number of images]/[img_per_grp]. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "200"; token.restore = "200"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "CTF"; token.key_prefix = "--"; token.label = "CTF phase flipping"; token.help = "If set, the data will be phase-flipped using CTF information included in the image headers. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "xr"; token.key_prefix = "--"; token.label = "Translation search range [Pixels]"; token.help = "The translational search range. Set by the program by default. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "thld_err"; token.key_prefix = "--"; token.label = "Pixel error threshold [Pixels]"; token.help = "Used for checking stability. It is defined as the root mean square of distances between corresponding pixels from set of found transformations and theirs average transformation, depends linearly on square of radius (parameter ou). units - pixels. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "0.7"; token.restore = "0.7"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "target_radius"; token.key_prefix = "--"; token.label = "Target particle radius [Pixels]"; token.help = "Particle radius used by isac to process the data. The images will be resized to fit this radius "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "29"; token.restore = "29"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "target_nx"; token.key_prefix = "--"; token.label = "Target particle image size [Pixels]"; token.help = "Image size used by isac to process the data. The images will be resized according to target particle radius and then cut/padded to achieve the target image size. When xr > 0, the final image size for isac processing is 'target_nx + xr - 1' "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "76"; token.restore = "76"; token.type = "int"; sxcmd.token_list.append(token)
# Advanced tuning options below (polar resampling, tangent filter, stability
# checks, restart control).
token = SXcmd_token(); token.key_base = "ir"; token.key_prefix = "--"; token.label = "Inner ring [Pixels]"; token.help = "Inner of the resampling to polar coordinates. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "rs"; token.key_prefix = "--"; token.label = "Ring step [Pixels]"; token.help = "Step of the resampling to polar coordinates. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "ts"; token.key_prefix = "--"; token.label = "Search step [Pixels]"; token.help = "Translational search step. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "1.0"; token.restore = "1.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "maxit"; token.key_prefix = "--"; token.label = "Reference-free alignment iterations"; token.help = "The number of iterations for reference-free alignment. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "30"; token.restore = "30"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "center_method"; token.key_prefix = "--"; token.label = "Centering method"; token.help = "Method to center global 2D average during the initial prealignment of the data (0 : no centering; -1 : average shift method; please see center_2D in utilities.py for methods 1-7). "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "-1"; token.restore = "-1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "dst"; token.key_prefix = "--"; token.label = "Discrete angle used for within-group alignment"; token.help = "Discrete angle used for within-group alignment. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "90.0"; token.restore = "90.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "FL"; token.key_prefix = "--"; token.label = "Lowest filter frequency [1/Pixel]"; token.help = "Lowest frequency used for the tangent filter. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0.2"; token.restore = "0.2"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "FH"; token.key_prefix = "--"; token.label = "Highest filter frequency [1/Pixel]"; token.help = "Highest frequency used for the tangent filter. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0.45"; token.restore = "0.45"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "FF"; token.key_prefix = "--"; token.label = "Tangent filter fall-off"; token.help = "The fall-off of the tangent filter. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0.2"; token.restore = "0.2"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "init_iter"; token.key_prefix = "--"; token.label = "Maximum generations"; token.help = "Maximum number of generation iterations performed for a given subset. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "7"; token.restore = "7"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "iter_reali"; token.key_prefix = "--"; token.label = "SAC stability check interval"; token.help = "Defines every how many iterations the SAC stability checking is performed. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "stab_ali"; token.key_prefix = "--"; token.label = "Number of alignments for stability check"; token.help = "The number of alignments when checking stability. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "5"; token.restore = "5"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "minimum_grp_size"; token.key_prefix = "--"; token.label = "Minimum size of reproducible class"; token.help = "Minimum size of reproducible class. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "60"; token.restore = "60"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "rand_seed"; token.key_prefix = "--"; token.label = "Seed"; token.help = "Random seed set before calculations. Useful for testing purposes. By default, isac sets a random seed number. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "skip_prealignment"; token.key_prefix = "--"; token.label = "Do pre-alignment"; token.help = "Indicate if pre-alignment should be used or not. Do not use pre-alignment if images are already centered. The 2dalignment directory will still be generated but the parameters will be zero. By default, do pre-alignment. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "restart"; token.key_prefix = "--"; token.label = "Restart run"; token.help = "0: Restart ISAC2 after the last completed main iteration (i.e. the directory must contain ''finished'' file); k: Restart ISAC2 after k-th main iteration, it has to be completed (i.e. the directory must contain ''finished'' file), and higer iterations will be removed; Default: Do not restart. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "-1"; token.restore = "-1"; token.type = "int"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# sxpipe isac_substack (isac category): create a virtual stack of the
# particles accounted for by ISAC class averages, plus a text file of the
# retrieved original image numbers.
sxcmd = SXcmd(); sxcmd.name = "sxpipe"; sxcmd.subname = "isac_substack"; sxcmd.mode = ""; sxcmd.label = "Create Stack Subset"; sxcmd.short_info = "Create virtual subset stack consisting of ISAC accounted particles by retrieving particle numbers associated with the class averages. The command also saves a list text file containing the retrieved original image numbers."; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_isac"; sxcmd.role = "sxr_pipe"; sxcmd.is_submittable = True
token = SXcmd_token(); token.key_base = "input_bdb_stack_path"; token.key_prefix = ""; token.label = "Input BDB image stack"; token.help = "Specify the same BDB image stack used for the associated ISAC run. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "bdb"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "input_isac_class_avgs_path"; token.key_prefix = ""; token.label = "ISAC class average file path"; token.help = "Input ISAC class average file path. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "output_directory"; token.key_prefix = ""; token.label = "Output directory"; token.help = "The results will be written here. This directory will be created automatically and it must not exist previously. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "isac_class_id"; token.key_prefix = "--"; token.label = "ISAC class average ID"; token.help = "Retrieve only particle members of the specifed ISAC class. By default, retrieve from all classes. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "-1"; token.restore = "-1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "no_virtual_stack"; token.key_prefix = "--"; token.label = "Create virtual stack"; token.help = "Indicate if virtual stack should be created or not. By default, create virtual stack. Otherwise, create only the particle ID list text file associated with the ISAC class averages. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# sxcompute_isac_avg "Beautifier" (isac category, MPI-capable): recompute
# ISAC class averages at the original pixel size with CTF correction and
# optional power-spectrum adjustment / B-factor enhancement.
# NOTE(review): this command's token list continues beyond this excerpt; its
# closing sxcmd_list.append(sxcmd) is not visible here.
sxcmd = SXcmd(); sxcmd.name = "sxcompute_isac_avg"; sxcmd.subname = ""; sxcmd.mode = ""; sxcmd.label = "Beautifier"; sxcmd.short_info = "Beautify the ISAC 2D clustering result with the original pixel size."; sxcmd.mpi_support = True; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_isac"; sxcmd.role = "sxr_pipe"; sxcmd.is_submittable = True
token = SXcmd_token(); token.key_base = "stack"; token.key_prefix = "--"; token.label = "Original image stack"; token.help = "Data stack that ISAC run used. Particles required to create the full-sized class averages. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "isac_dir"; token.key_prefix = "--"; token.label = "Isac output directory"; token.help = "Path to ISAC run output directory. This is input directory for this command. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "directory"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "output_dir"; token.key_prefix = "--"; token.label = "Output directory"; token.help = "The directory will be automatically created and the results will be written here. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "radius"; token.key_prefix = "--"; token.label = "Particle radius [Pixels]"; token.help = "There is no default radius. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "-1"; token.restore = "-1"; token.type = "radius"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "noctf"; token.key_prefix = "--"; token.label = "CTF correction"; token.help = "Indicate if full CTF correction should be applied or not. Always use the CTF correction for cryo data, but not for negative stained data. By default, do full CTF correction. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "nopwadj"; token.key_prefix = "--"; token.label = "Adjust PW spectrum"; token.help = "Indicate if PW spectrum should be adjusted or not. The program adjust PW spectrum to analytical model or given 1-D rotationally averaged power spectrum. By default, apply PW spectrum adjustment to model. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "modelpw"; token.key_prefix = "--"; token.label = "Reference 1D PW spectrum"; token.help = "A text file contains 1-D rotational averaged power spectrum of either PDB model or EM map sampled in the given pixel_size and in original image size. By default, use the analytic model (See Penczek Methods Enzymol 2010) "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "B_enhance"; token.key_prefix = "--"; token.label = "Apply B-factor enhancement"; token.help = "Apply B-factor enhancement. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "B_start"; token.key_prefix = "--"; token.label = "B-factor lower limit [A]"; token.help = "Lower limit for B-factor estimation. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "10.0"; token.restore = "10.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "Bfactor"; token.key_prefix = "--"; token.label = "Use ad-hoc B-factor [A^2]"; token.help = "Skip the automatic estimation and use user-provided ad-hoc B-factor for the enhancement. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "45.0"; token.restore = "45.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "fl"; token.key_prefix = "--"; token.label = "Low-pass filter frequency"; token.help = "Cutoff frequency of low-pass filter. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "-1.0"; token.restore = "-1.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "pixel_size"; token.key_prefix = "--"; token.label = "Pixel size [A]"; token.help = "The pixel size of images in input particle stack for isac. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "-1.0"; token.restore = "-1.0"; token.type = "apix"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "xr"; token.key_prefix = "--"; token.label = "Local X search range [Pixels]"; token.help = "Translational search range in the x direction for local alignment. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "-1.0"; token.restore = "-1.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "ts"; token.key_prefix = "--"; token.label = "Local search step [Pixels]"; token.help = "Translational search step for local alignment. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "1.0"; token.restore = "1.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "fh"; token.key_prefix = "--"; token.label = "High frequency search limit"; token.help = "High frequency search limit for local alignment. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "-1.0"; token.restore = "-1.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "maxit"; token.key_prefix = "--"; token.label = "Local alignment iterations"; token.help = "The number of iterations for local aligment. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "5"; token.restore = "5"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "navg"; token.key_prefix = "--"; token.label = "Number of averages"; token.help = "The number of averages. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "-1"; token.restore = "-1"; token.type = "int"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# Register "e2display": EMAN2 utility for displaying images, volumes, or
# 1D plots (no MPI, cannot be submitted to a queue).
sxcmd = SXcmd()
sxcmd.name = "e2display"
sxcmd.subname = ""
sxcmd.mode = ""
sxcmd.label = "Display Data"
sxcmd.short_info = "Displays images, volumes, or 1D plots."
sxcmd.mpi_support = False
sxcmd.mpi_add_flag = False
sxcmd.category = "sxc_isac"
sxcmd.role = "sxr_util"
sxcmd.is_submittable = False

# Positional argument: the files to display.
token = SXcmd_token()
token.key_base = "input_data_list"
token.key_prefix = ""
token.label = "Input files"
token.help = "List of input images, volumes, plots. Wild cards (e.g *) can be used to select a list of files. Not recommended when the list is too large. "
token.group = "main"
token.is_required = False
token.is_locked = False
token.default = "none"
token.restore = "none"
token.type = "any_file_list"
sxcmd.token_list.append(token)

# --singleimage: show a stack one image at a time.
token = SXcmd_token()
token.key_base = "singleimage"
token.key_prefix = "--"
token.label = "Single image view"
token.help = "Displays a stack in a single image view: "
token.group = "advanced"
token.is_required = False
token.is_locked = False
token.default = False
token.restore = False
token.type = "bool"
sxcmd.token_list.append(token)

# --fullrange: disable auto-contrast for 2D display.
token = SXcmd_token()
token.key_base = "fullrange"
token.key_prefix = "--"
token.label = "Use full range of pixel values"
token.help = "Instead of default auto-contrast, use full range of pixel values for the display of particles stacks and 2D images, "
token.group = "advanced"
token.is_required = False
token.is_locked = False
token.default = False
token.restore = False
token.type = "bool"
sxcmd.token_list.append(token)

# --verbose: verbosity level 0-9.
token = SXcmd_token()
token.key_base = "verbose"
token.key_prefix = "--"
token.label = "Verbose"
token.help = "Accepted values 0-9. "
token.group = "advanced"
token.is_required = False
token.is_locked = False
token.default = "0"
token.restore = "0"
token.type = "int"
sxcmd.token_list.append(token)

sxcmd_list.append(sxcmd)
# --- sxrviper: reproducible ab initio 3D structure determination (RVIPER) ---
sxcmd = SXcmd(); sxcmd.name = "sxrviper"; sxcmd.subname = ""; sxcmd.mode = ""; sxcmd.label = "Initial 3D Model - RVIPER"; sxcmd.short_info = "Reproducible ''ab initio'' 3D structure determination. The program is designed to determine a validated initial intermediate resolution structure using a small set (less than 100) of class averages produced by ISAC."; sxcmd.mpi_support = True; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_viper"; sxcmd.role = "sxr_pipe"; sxcmd.is_submittable = True
# main group: I/O, radius, symmetry, iteration control.
token = SXcmd_token(); token.key_base = "stack"; token.key_prefix = ""; token.label = "Input images stack"; token.help = "A small set (less than 100) of class averages produced by ISAC. The images must be square and the stack must be hdf format . "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "hdf"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "output_directory"; token.key_prefix = ""; token.label = "Output directory"; token.help = "The directory will be automatically created and the results will be written here. If the directory already exists, results will be written there, possibly overwriting previous runs. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "radius"; token.key_prefix = "--"; token.label = "Target particle radius [Pixels]"; token.help = "Use the same value as in ISAC. It has to be less than half the box size. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "29"; token.restore = "29"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "sym"; token.key_prefix = "--"; token.label = "Point-group symmetry"; token.help = ""; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "c1"; token.restore = "c1"; token.type = "sym"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "n_rv_runs"; token.key_prefix = "--"; token.label = "RVIPER iterations"; token.help = "Corresponds to main### output directory. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "10"; token.restore = "10"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "iteration_start"; token.key_prefix = "--"; token.label = "Restarting iteration"; token.help = "Iteration from which to restart the program. 0 means go to the most recent one. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "0"; token.restore = "0"; token.type = "int"; sxcmd.token_list.append(token)
# advanced group: outlier handling, alignment search, GA parameters, filters.
# NOTE(review): label typo "Minimun" -> "Minimum" below; user-facing text, so
# it should be fixed in the generator that emits this table.
token = SXcmd_token(); token.key_base = "n_v_runs"; token.key_prefix = "--"; token.label = "Minimun VIPER runs per RVIPER iterations"; token.help = "Corresponds to run### output directory. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "3"; token.restore = "3"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "npad"; token.key_prefix = "--"; token.label = "Image padding factor"; token.help = "The images are padded to achieve the original size times this option. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "2"; token.restore = "2"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "criterion_name"; token.key_prefix = "--"; token.label = "Stable projection criterion"; token.help = "Used to decide if the volumes have a set of stable projections. Valid options are - '80th percentile', or 'fastest increase in the last quartile'. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "80th percentile"; token.restore = "80th percentile"; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "outlier_index_threshold_method"; token.key_prefix = "--"; token.label = "Outlier selection method"; token.help = "Used to decide which images to keep. Valid options are - 'discontinuity_in_derivative', 'percentile', or 'angle_measure'. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "discontinuity_in_derivative"; token.restore = "discontinuity_in_derivative"; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "angle_threshold"; token.key_prefix = "--"; token.label = "Angle threshold"; token.help = "Threshold used to remove projections if 'angle_measure' is used to decide the outliers. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "30"; token.restore = "30"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "outlier_percentile"; token.key_prefix = "--"; token.label = "Percentile for outlier"; token.help = "Threshold above which images are considered outliers and removed if 'percentile' is used as outlier selection method. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "95.0"; token.restore = "95.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "ir"; token.key_prefix = "--"; token.label = "Inner rotational search radius [Pixels]"; token.help = ""; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "rs"; token.key_prefix = "--"; token.label = "Ring step size"; token.help = "Step between rings used for the rotational search. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
# NOTE(review): xr/yr/ts/delta defaults carry embedded quotes ("'0'") —
# presumably passed verbatim to the underlying command line; confirm against
# the command-string builder before normalizing.
token = SXcmd_token(); token.key_base = "xr"; token.key_prefix = "--"; token.label = "X search range [Pixels]"; token.help = "The translational search range in the x direction will take place in a +/xr range. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "'0'"; token.restore = "'0'"; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "yr"; token.key_prefix = "--"; token.label = "Y search range [Pixels]"; token.help = "The translational search range in the y direction. If omitted it will be xr. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "'0'"; token.restore = "'0'"; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "ts"; token.key_prefix = "--"; token.label = "Translational search step [Pixels]"; token.help = "The search will be performed in -xr, -xr+ts, 0, xr-ts, xr, can be fractional. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "'1.0'"; token.restore = "'1.0'"; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "delta"; token.key_prefix = "--"; token.label = "Projection angular step [Degrees]"; token.help = ""; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "'2.0'"; token.restore = "'2.0'"; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "center"; token.key_prefix = "--"; token.label = "Center 3D template"; token.help = "0: no centering; 1: center of gravity. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "-1.0"; token.restore = "-1.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "maxit1"; token.key_prefix = "--"; token.label = "Maximum iterations - GA step"; token.help = ""; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "400"; token.restore = "400"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "maxit2"; token.key_prefix = "--"; token.label = "Maximum iterations - Finish step"; token.help = ""; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "50"; token.restore = "50"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "mask3D"; token.key_prefix = "--"; token.label = "3D mask"; token.help = ""; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "sphere"; token.restore = "sphere"; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "moon_elimination"; token.key_prefix = "--"; token.label = "Eliminate disconnected regions"; token.help = "Used to removed disconnected pieces from the model. It requires as argument a comma separated string with the mass in KDa and the pixel size. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "L2threshold"; token.key_prefix = "--"; token.label = "GA stop threshold"; token.help = "Defines the maximum relative dispersion of volumes' L2 norms. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0.03"; token.restore = "0.03"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "ref_a"; token.key_prefix = "--"; token.label = "Projection generation method"; token.help = "Method for generating the quasi-uniformly distributed projection directions. S- Saff algorithm, or P - Penczek 1994 algorithm. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "S"; token.restore = "S"; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "n_shc_runs"; token.key_prefix = "--"; token.label = "GA population size"; token.help = "This defines the number of quasi-independent volumes generated. (same as '--nruns' parameter from sxviper.py). "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "4"; token.restore = "4"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "doga"; token.key_prefix = "--"; token.label = "Threshold to start GA"; token.help = "Do GA when the fraction of orientation that changes less than 1.0 degrees is at least this fraction. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0.1"; token.restore = "0.1"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "fl"; token.key_prefix = "--"; token.label = "Low-pass filter frequency [1/Pixels]"; token.help = "Using a hyperbolic tangent low-pass filter. Specify with absolute frequency. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0.25"; token.restore = "0.25"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "aa"; token.key_prefix = "--"; token.label = "Low-pass filter fall-off [1/Pixels]"; token.help = "Fall-off of for the hyperbolic tangent low-pass filter. Specify with absolute frequency. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0.1"; token.restore = "0.1"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "pwreference"; token.key_prefix = "--"; token.label = "Power spectrum reference"; token.help = "Text file containing a 1D reference power spectrum. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "parameters"; sxcmd.token_list.append(token)
# Finalize: register sxrviper in the global command list.
sxcmd_list.append(sxcmd)
# Register "sxprocess" in "changesize" mode: resize a VIPER model by
# resampling (decimation or interpolation up); the pixel size and window
# size are adjusted accordingly.
sxcmd = SXcmd()
sxcmd.name = "sxprocess"
sxcmd.subname = ""
sxcmd.mode = "changesize"
sxcmd.label = "Change Size of VIPER Model"
sxcmd.short_info = "Change size of image or volume (resample, decimation or interpolation up). The process also changes the pixel size and window size accordingly. "
sxcmd.mpi_support = False
sxcmd.mpi_add_flag = False
sxcmd.category = "sxc_viper"
sxcmd.role = "sxr_pipe"
sxcmd.is_submittable = True

# --changesize: mode-selecting flag; required and locked so the GUI always
# emits it for this command entry.
token = SXcmd_token()
token.key_base = "changesize"
token.key_prefix = "--"
token.label = "Change size of VIPER model"
token.help = "Change size of image or volume (resample, decimation or interpolation up). The process also changes the pixel size and window size accordingly. "
token.group = "main"
token.is_required = True
token.is_locked = True
token.default = True
token.restore = True
token.type = "bool"
sxcmd.token_list.append(token)

# input_stack (positional): the VIPER model to resize.
token = SXcmd_token()
token.key_base = "input_stack"
token.key_prefix = ""
token.label = "Input Viper Model"
token.help = "Input Viper Model."
token.group = "main"
token.is_required = True
# BUGFIX: was `None`; every other unlocked token in this table uses False,
# so a truthiness check still behaves the same but the table is consistent.
token.is_locked = False
token.default = ""
token.restore = ""
token.type = "image"
sxcmd.token_list.append(token)

# output_stack (positional): destination for the resized model.
token = SXcmd_token()
token.key_base = "output_stack"
token.key_prefix = ""
token.label = "Output Resized Viper Model"
token.help = "Output resized (decimated or interpolated up) Viper Model."
token.group = "main"
token.is_required = True
# BUGFIX: was `None`; normalized to False for consistency with the rest of
# the table (see input_stack above).
token.is_locked = False
token.default = ""
token.restore = ""
token.type = "output"
sxcmd.token_list.append(token)

# --ratio: new/old image-size ratio (< 1 shrinks the image and grows the
# pixel size; > 1 the other way round).
token = SXcmd_token()
token.key_base = "ratio"
token.key_prefix = "--"
token.label = "Ratio of new to old image size"
token.help = "if < 1, the pixel size will increase and image size decrease. if > 1, the other way round. "
token.group = "main"
token.is_required = False
token.is_locked = False
token.default = "1.0"
token.restore = "1.0"
token.type = "float"
sxcmd.token_list.append(token)

sxcmd_list.append(sxcmd)
# Register "e2proc3d" in "clip" mode: window (pad or clip) a volume to the
# requested dimensions.
sxcmd = SXcmd()
sxcmd.name = "e2proc3d"
sxcmd.subname = ""
sxcmd.mode = "clip"
sxcmd.label = "Window VIPER Model"
sxcmd.short_info = "Window (pad or clip) volume to the specific dimensions. Specify 1, 3 or 6 arguments; '[,,[,,,]]'. "
sxcmd.mpi_support = False
sxcmd.mpi_add_flag = False
sxcmd.category = "sxc_viper"
sxcmd.role = "sxr_pipe"
sxcmd.is_submittable = True

# --clip: target dimensions (1, 3, or 6 comma-separated values).
token = SXcmd_token()
token.key_base = "clip"
token.key_prefix = "--"
token.label = "Window to specified size [Pixels]"
token.help = "Window (pad or clip) volume to the specific dimensions. Specify 1, 3 or 6 arguments; '[,,[,,,]]'. "
token.group = "main"
token.is_required = True
token.is_locked = False
token.default = ""
token.restore = ""
token.type = "string"
sxcmd.token_list.append(token)

# input_volume (positional): volume to window.
token = SXcmd_token()
token.key_base = "input_volume"
token.key_prefix = ""
token.label = "Input volume"
token.help = "Input volume file name."
token.group = "main"
token.is_required = True
token.is_locked = False
token.default = ""
token.restore = ""
token.type = "image"
sxcmd.token_list.append(token)

# output_file (positional): destination for the windowed volume.
token = SXcmd_token()
token.key_base = "output_file"
token.key_prefix = ""
token.label = "Output windowed volume"
token.help = "Output windowed (clipped/padded) volume file name."
token.group = "main"
token.is_required = True
token.is_locked = False
token.default = ""
token.restore = ""
token.type = "output"
sxcmd.token_list.append(token)

sxcmd_list.append(sxcmd)
# --- sxviper: ab initio 3D structure determination (single VIPER run) ---
sxcmd = SXcmd(); sxcmd.name = "sxviper"; sxcmd.subname = ""; sxcmd.mode = ""; sxcmd.label = "Initial 3D Model - VIPER"; sxcmd.short_info = "''ab initio'' 3D structure determination using Validation of Individual Parameter Reproducibility (VIPER). Designed to determine a validated initial model using a small set of class averages produced by ISAC."; sxcmd.mpi_support = True; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_viper"; sxcmd.role = "sxr_alt"; sxcmd.is_submittable = True
# main group: I/O, radius, symmetry.
token = SXcmd_token(); token.key_base = "stack"; token.key_prefix = ""; token.label = "Input images stack"; token.help = "A small set of Class averages produced by ISAC. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "directory"; token.key_prefix = ""; token.label = "Output directory"; token.help = "The directory will be automatically created and the results will be written here. If the directory already exists, results will be written there, possibly overwriting previous runs. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "radius"; token.key_prefix = "--"; token.label = "Target particle radius [Pixels]"; token.help = "Use the same value as in ISAC. It has to be less than half the box size. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "29"; token.restore = "29"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "sym"; token.key_prefix = "--"; token.label = "Point-group symmetry"; token.help = ""; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "c1"; token.restore = "c1"; token.type = "sym"; sxcmd.token_list.append(token)
# advanced group: alignment search, GA parameters, filters (largely parallel
# to the sxrviper token set above, with --nruns instead of --n_shc_runs).
token = SXcmd_token(); token.key_base = "ir"; token.key_prefix = "--"; token.label = "Inner rotational search radius [Pixels]"; token.help = ""; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "rs"; token.key_prefix = "--"; token.label = "Ring step size"; token.help = "Step between rings used for the rotational search. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "xr"; token.key_prefix = "--"; token.label = "X search range [Pixels]"; token.help = "The translational search range in the x direction will take place in a +/xr range. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "'0'"; token.restore = "'0'"; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "yr"; token.key_prefix = "--"; token.label = "Y search range [Pixels]"; token.help = "The translational search range in the y direction. If omitted it will be xr. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "'0'"; token.restore = "'0'"; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "ts"; token.key_prefix = "--"; token.label = "Translational search step [Pixels]"; token.help = "The search will be performed in -xr, -xr+ts, 0, xr-ts, xr, can be fractional. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "'1.0'"; token.restore = "'1.0'"; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "delta"; token.key_prefix = "--"; token.label = "Projection angular step [Degrees]"; token.help = ""; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "'2.0'"; token.restore = "'2.0'"; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "center"; token.key_prefix = "--"; token.label = "Center 3D template"; token.help = "0: no centering; 1: center of gravity "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "-1.0"; token.restore = "-1.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "maxit1"; token.key_prefix = "--"; token.label = "Maximum iterations - GA step"; token.help = ""; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "400"; token.restore = "400"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "maxit2"; token.key_prefix = "--"; token.label = "Maximum iterations - Finish step"; token.help = ""; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "50"; token.restore = "50"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "mask3D"; token.key_prefix = "--"; token.label = "3D mask"; token.help = ""; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "sphere"; token.restore = "sphere"; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "moon_elimination"; token.key_prefix = "--"; token.label = "Eliminate disconnected regions"; token.help = "Used to removed disconnected pieces from the model. It requires as argument a comma separated string with the mass in KDa and the pixel size. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "L2threshold"; token.key_prefix = "--"; token.label = "GA stop threshold"; token.help = "Defines the maximum relative dispersion of volumes' L2 norms. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0.03"; token.restore = "0.03"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "ref_a"; token.key_prefix = "--"; token.label = "Projection generation method"; token.help = "Method for generating the quasi-uniformly distributed projection directions. S- Saff algorithm, or P - Penczek 1994 algorithm. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "S"; token.restore = "S"; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "nruns"; token.key_prefix = "--"; token.label = "GA population size"; token.help = "This defines the number of quasi-independent volumes generated. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "6"; token.restore = "6"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "doga"; token.key_prefix = "--"; token.label = "Threshold to start GA"; token.help = "Do GA when the fraction of orientation that changes less than 1.0 degrees is at least this fraction. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0.1"; token.restore = "0.1"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "fl"; token.key_prefix = "--"; token.label = "Low-pass filter frequency [1/Pixels]"; token.help = "Using a hyperbolic tangent low-pass filter. Specify with absolute frequency. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0.25"; token.restore = "0.25"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "aa"; token.key_prefix = "--"; token.label = "Low-pass filter fall-off [1/Pixels]"; token.help = "Fall-off of for the hyperbolic tangent low-pass filter. Specify with absolute frequency. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0.1"; token.restore = "0.1"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "pwreference"; token.key_prefix = "--"; token.label = "Power spectrum reference"; token.help = "Text file containing a 1D reference power spectrum. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "parameters"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "debug"; token.key_prefix = "--"; token.label = "Verbose"; token.help = "Print debug info. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
# Finalize: register sxviper in the global command list.
sxcmd_list.append(sxcmd)
# -- sxpdb2em: convert an atomic model (PDB file) into a simulated density map --
sxcmd = SXcmd()
sxcmd.name = "sxpdb2em"
sxcmd.subname = ""
sxcmd.mode = ""
sxcmd.label = "PDB File Conversion"
sxcmd.short_info = "Converts an atomic model into a simulated density map"
sxcmd.mpi_support = False
sxcmd.mpi_add_flag = False
sxcmd.category = "sxc_viper"
sxcmd.role = "sxr_alt"
sxcmd.is_submittable = True
# Register the command's tokens; tuple fields mirror the attribute-assignment
# order used throughout this generated file.
for (key_base, key_prefix, label, help_text, group, is_required, is_locked,
     default, restore, token_type) in [
    ("input_pdb", "", "Input PDB file", "Starting atomic coordinates: ", "main", True, False, "", "", "pdb"),
    ("output_hdf", "", "Output map", "", "main", True, False, "", "", "output"),
    ("apix", "--", "Pixel size of output map [A]", "", "main", False, False, "1.0", "1.0", "apix"),
    ("box", "--", "Output box size [Voxels]", "If not given, the program will find the minimum box size fitting the structure. Be aware that this will most likely result in a rectangular box. ", "main", True, False, "", "", "box"),
    ("het", "--", "Include hetero atoms", "Otherwise the HETATM entries in the PDB file are ignored. ", "main", False, False, False, False, "bool"),
    ("center", "--", "Center model at the origin", ": Specifies whether the atomic model should be moved to the origin before generating density map. Available options are: c - Use the geometrical center of atoms; a - Use the center of mass (recommended); x,y,z - Vector to be subtracted for the coordinates. Default: no centering, in which case (0,0,0) in the PDB space will map to the center of the EM volume. ", "main", False, False, "n", "n", "string"),
    ("O", "--", "Apply additional rotation", "This can be used to modify the orientation of the atomic model. ", "main", False, False, False, False, "bool"),
    ("tr0", "--", "Rotational matrix file", ": This file must contain the 3x4 transformation matrix to be applied to the PDB coordinates after centering. The translation vector (last column of the matrix) must be specified in Angstrom. ", "main", False, False, "none", "none", "parameters"),
    ("quiet", "--", "Silent mode", "Does not print any information to the monitor. ", "advanced", False, False, False, False, "bool"),
]:
    token = SXcmd_token()
    token.key_base = key_base
    token.key_prefix = key_prefix
    token.label = label
    token.help = help_text
    token.group = group
    token.is_required = is_required
    token.is_locked = is_locked
    token.default = default
    token.restore = restore
    token.type = token_type
    sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# -- e2display (VIPER category): generic image/volume/plot viewer, run locally --
sxcmd = SXcmd()
sxcmd.name = "e2display"
sxcmd.subname = ""
sxcmd.mode = ""
sxcmd.label = "Display Data"
sxcmd.short_info = "Displays images, volumes, or 1D plots."
sxcmd.mpi_support = False
sxcmd.mpi_add_flag = False
sxcmd.category = "sxc_viper"
sxcmd.role = "sxr_util"
sxcmd.is_submittable = False
# Token table; tuple fields mirror the attribute-assignment order used in this file.
for (key_base, key_prefix, label, help_text, group, is_required, is_locked,
     default, restore, token_type) in [
    ("input_data_list", "", "Input files", "List of input images, volumes, plots. Wild cards (e.g *) can be used to select a list of files. Not recommended when the list is too large. ", "main", False, False, "none", "none", "any_file_list"),
    ("singleimage", "--", "Single image view", "Displays a stack in a single image view: ", "advanced", False, False, False, False, "bool"),
    ("fullrange", "--", "Use full range of pixel values", "Instead of default auto-contrast, use full range of pixel values for the display of particles stacks and 2D images, ", "advanced", False, False, False, False, "bool"),
    ("verbose", "--", "Verbose", "Accepted values 0-9. ", "advanced", False, False, "0", "0", "int"),
]:
    token = SXcmd_token()
    token.key_base = key_base
    token.key_prefix = key_prefix
    token.label = label
    token.help = help_text
    token.group = group
    token.is_required = is_required
    token.is_locked = is_locked
    token.default = default
    token.restore = restore
    token.type = token_type
    sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# -- sxprocess --adaptive_mask (VIPER category): create a soft-edged 3D mask from a reference volume --
sxcmd = SXcmd(); sxcmd.name = "sxprocess"; sxcmd.subname = ""; sxcmd.mode = "adaptive_mask"; sxcmd.label = "Adaptive 3D Mask"; sxcmd.short_info = "Create soft-edged 3D mask from reference volume. "; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_viper"; sxcmd.role = "sxr_util"; sxcmd.is_submittable = True
# Mode selector token: required and locked so the GUI always passes --adaptive_mask.
token = SXcmd_token(); token.key_base = "adaptive_mask"; token.key_prefix = "--"; token.label = "Create soft-edged 3D mask"; token.help = "Create soft-edged 3D mask from reference volume. "; token.group = "main"; token.is_required = True; token.is_locked = True; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
# NOTE(review): is_locked is None here while most tokens use False — presumably
# treated as falsy downstream; verify against the SXcmd_token consumer.
token = SXcmd_token(); token.key_base = "input_volume"; token.key_prefix = ""; token.label = "Input volume"; token.help = "Input reference volume"; token.group = "main"; token.is_required = True; token.is_locked = None; token.default = ""; token.restore = ""; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "output_mask3D"; token.key_prefix = ""; token.label = "Output mask"; token.help = "Output 3D mask"; token.group = "main"; token.is_required = True; token.is_locked = None; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
# Masking parameters: nsigma-based thresholding is used unless an explicit
# --threshold above the -9999.0 sentinel is supplied.
token = SXcmd_token(); token.key_base = "nsigma"; token.key_prefix = "--"; token.label = "Density standard deviations threshold"; token.help = "Defines the threshold used to find the main volume within the data. All voxels with density <= mean + nsigma standard deviations will be included into the main volume. This option will not be used if the option threshold is larger than -9999.0. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1.0"; token.restore = "1.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "threshold"; token.key_prefix = "--"; token.label = "Binarization threshold"; token.help = "Below this value the data is assumed to not belong to the main volume. With the value lower than the default, the option will be ignored and the mask will be set according to nsigma. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "-9999.0"; token.restore = "-9999.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "ndilation"; token.key_prefix = "--"; token.label = "Mask extension cycles"; token.help = "The initial mask will be extended this number of cycles. To keep the size of the main volume, set this to kernel_size/2 "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "3"; token.restore = "3"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "kernel_size"; token.key_prefix = "--"; token.label = "Gaussian kernel size [Pixels]"; token.help = "Size of the gaussian kernel used to smooth the binary mask. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "11"; token.restore = "11"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "gauss_standard_dev"; token.key_prefix = "--"; token.label = "Kernel standard deviation [Pixels]"; token.help = "Standard deviation used in the construction of the gaussian smoothing of the mask. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "9"; token.restore = "9"; token.type = "int"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# -- sxprocess --angular_distribution (VIPER category): build a Chimera-viewable
# 3D representation (cylinder histogram) of an angular distribution --
# Fix: corrected the user-facing typo "UCFS Chimera" -> "UCSF Chimera" in the
# short_info and mode-token help strings (display text only; no flag changed).
sxcmd = SXcmd(); sxcmd.name = "sxprocess"; sxcmd.subname = ""; sxcmd.mode = "angular_distribution"; sxcmd.label = "Angular Distribution"; sxcmd.short_info = "Create angular distribution file containing a 3D representation of the given angular distribution. It can be viewed with UCSF Chimera. "; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_viper"; sxcmd.role = "sxr_util"; sxcmd.is_submittable = True
# Mode selector token: required and locked so the GUI always passes --angular_distribution.
token = SXcmd_token(); token.key_base = "angular_distribution"; token.key_prefix = "--"; token.label = "Create angular distribution file"; token.help = "Create angular distribution file containing a 3D representation of the given angular distribution. It can be viewed with UCSF Chimera. "; token.group = "main"; token.is_required = True; token.is_locked = True; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "inputfile"; token.key_prefix = ""; token.label = "Alignment Parameter file"; token.help = "Alignment Parameter file created by a previous 3D reconstruction step (e.g. sxmeridien.py)"; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "any_file"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "pixel_size"; token.key_prefix = "--"; token.label = "Pixel size [A]"; token.help = "Pixel size in A. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1.0"; token.restore = "1.0"; token.type = "apix"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "round_digit"; token.key_prefix = "--"; token.label = "Number precision"; token.help = "Decimal numbers will be rounded to this number of decimal points. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "5"; token.restore = "5"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "box_size"; token.key_prefix = "--"; token.label = "Box size [Pixels]"; token.help = "Box size in pixel used for calculating the center of the particle. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "500"; token.restore = "500"; token.type = "box"; sxcmd.token_list.append(token)
# The remaining options only affect the Chimera visualization geometry.
token = SXcmd_token(); token.key_base = "particle_radius"; token.key_prefix = "--"; token.label = "Particle radius [Pixels]"; token.help = "Used for the representation in Chimera. Defines where the cylinders representing the histogram must start. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "175"; token.restore = "175"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "cylinder_width"; token.key_prefix = "--"; token.label = "Cylinder width"; token.help = "Used for the representation in Chimera. This will define the width of the cylinders representing the histogram."; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "cylinder_length"; token.key_prefix = "--"; token.label = "Cylinder length"; token.help = "Used for the representation in Chimera. This will define the relative size of the cylinders representing the histogram. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "10000"; token.restore = "10000"; token.type = "int"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# -- sxmeridien: MPI-parallel 3D structure refinement (main pipeline step) --
sxcmd = SXcmd(); sxcmd.name = "sxmeridien"; sxcmd.subname = ""; sxcmd.mode = ""; sxcmd.label = "3D Refinement"; sxcmd.short_info = "Perform 3D structure refinement."; sxcmd.mpi_support = True; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_meridien"; sxcmd.role = "sxr_pipe"; sxcmd.is_submittable = True
# Positional arguments (empty key_prefix): stack, output directory, initial reference.
token = SXcmd_token(); token.key_base = "stack"; token.key_prefix = ""; token.label = "Input image stack"; token.help = ""; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "output_directory"; token.key_prefix = ""; token.label = "Output directory"; token.help = "The results will be written here. This directory will be created automatically if it does not exist. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "output_continue"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "initial_volume"; token.key_prefix = ""; token.label = "Initial 3D reference"; token.help = ""; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "image"; sxcmd.token_list.append(token)
# Main refinement options.
token = SXcmd_token(); token.key_base = "radius"; token.key_prefix = "--"; token.label = "Particle radius [Pixels]"; token.help = "Outer radius [in pixels] of particles < int(nx/2)-1 "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "-1"; token.restore = "-1"; token.type = "radius"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "mask3D"; token.key_prefix = "--"; token.label = "3D mask"; token.help = "Soft mask for the volume. If not given, a hard sphere of radius boxsize/2-1 will be used. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "symmetry"; token.key_prefix = "--"; token.label = "Point-group symmetry"; token.help = "Symmetry of the refined structure. Acceptable values are: cn, dn, where n is multiplicity. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "c1"; token.restore = "c1"; token.type = "sym"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "inires"; token.key_prefix = "--"; token.label = "Starting resolution [A]"; token.help = "Resolution used to start the refinement. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "25.0"; token.restore = "25.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "delta"; token.key_prefix = "--"; token.label = "Initial angular sampling step [Degrees]"; token.help = "Initial angular sampling step. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "7.5"; token.restore = "7.5"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "do_final"; token.key_prefix = "--"; token.label = "Do only final reconstruction"; token.help = "Specify the iteration where you wish to perform only final reconstruction using the alignment parameters. By setting to 0, program searches the iteration which achieved the best resolution, then performs only final reconstruction using the alignment parameters. By default, the program performs the final reconstruction using the best iteration after the convergence. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "-1"; token.restore = "-1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "memory_per_node"; token.key_prefix = "--"; token.label = "Memory per node [GB]"; token.help = "User provided information about memory per node in GB (NOT per CPU). By default, it uses 2GB * (number of CPUs per node) "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "-1.0"; token.restore = "-1.0"; token.type = "float"; sxcmd.token_list.append(token)
# Advanced options.
token = SXcmd_token(); token.key_base = "xr"; token.key_prefix = "--"; token.label = "Search range [Pixels]"; token.help = "Range for translation search in both directions. Search is +/-xr. It can be fractional "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "5.0"; token.restore = "5.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "ts"; token.key_prefix = "--"; token.label = "Search step size [Pixels]"; token.help = "Step size of translation search in both directions. Search is within a circle of radius xr on a grid with steps ts. It can be fractional. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "1.0"; token.restore = "1.0"; token.type = "float"; sxcmd.token_list.append(token)
# NOTE(review): label "Do 2D pre-alignment" with default True describes the
# opposite of the underlying --skip_prealignment flag — presumably the GUI
# inverts such booleans when building the command line; verify downstream.
token = SXcmd_token(); token.key_base = "skip_prealignment"; token.key_prefix = "--"; token.label = "Do 2D pre-alignment"; token.help = "Indicate if pre-alignment should be used or not. Do not use 2D pre-alignment if images are already centered. By default, do 2D pre-alignment. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "initialshifts"; token.key_prefix = "--"; token.label = "Read shifts from header"; token.help = "Start with the shift parameters stored in the image headers. Useful to jumpstart the procedure. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "center_method"; token.key_prefix = "--"; token.label = "Centering method"; token.help = "Method for centering averages during initial 2D prealignment of data (0: no centering; -1: average shift method; For 1-7, see center_2D in utilities.py) "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "-1"; token.restore = "-1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "target_radius"; token.key_prefix = "--"; token.label = "Target particle radius [Pixels]"; token.help = "For 2D prealignment, images will be shrank/enlarged to this radius. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "29"; token.restore = "29"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "shake"; token.key_prefix = "--"; token.label = "Shake"; token.help = "Shake "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0.5"; token.restore = "0.5"; token.type = "float"; sxcmd.token_list.append(token)
# NOTE(review): "small_memory" labeled "Keep data in memory" and "nonorm"
# labeled "Apply image norm correction" — same inverted-flag pattern as above.
token = SXcmd_token(); token.key_base = "small_memory"; token.key_prefix = "--"; token.label = "Keep data in memory"; token.help = "Indicate if data should be kept in memory or not. By default, data will be kept in memory. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "ref_a"; token.key_prefix = "--"; token.label = "Projection generation method"; token.help = "Method for generating the quasi-uniformly distributed projection directions. S- Saff algorithm, or P - Penczek 1994 algorithm. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "S"; token.restore = "S"; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "ccfpercentage"; token.key_prefix = "--"; token.label = "Correlation peaks to be included [%]"; token.help = "Percentage of correlation peaks to be included. 0.0 corresponds to hard matching. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "99.9"; token.restore = "99.9"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "nonorm"; token.key_prefix = "--"; token.label = "Apply image norm correction"; token.help = "Indicate if image norm correction should be applied or not. By default, apply image norm correction. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "function"; token.key_prefix = "--"; token.label = "Reference preparation function"; token.help = "Function used to prepare the reference volume after each iteration. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "do_volume_mask"; token.restore = "do_volume_mask"; token.type = "function"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# -- sxprocess --postprocess: sharpen (B-factor) the merged map from two
# unfiltered half-volumes produced by sxmeridien --
sxcmd = SXcmd(); sxcmd.name = "sxprocess"; sxcmd.subname = ""; sxcmd.mode = "postprocess"; sxcmd.label = "Sharpening"; sxcmd.short_info = "B-factor is estimated from two unfiltered 3D volumes produced by meridien. "; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_meridien"; sxcmd.role = "sxr_pipe"; sxcmd.is_submittable = True
# Mode selector token: required and locked so the GUI always passes --postprocess.
token = SXcmd_token(); token.key_base = "postprocess"; token.key_prefix = "--"; token.label = "Apply B-factor to sharpen the power spectrum"; token.help = "B-factor is estimated from two unfiltered 3D volumes produced by meridien. "; token.group = "main"; token.is_required = True; token.is_locked = True; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "firstvolume"; token.key_prefix = ""; token.label = "First unfiltered half-volume "; token.help = "Generated by sxmeridien"; token.group = "main"; token.is_required = True; token.is_locked = None; token.default = ""; token.restore = ""; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "secondvolume"; token.key_prefix = ""; token.label = "Second unfiltered half-volume "; token.help = "Generated by sxmeridien"; token.group = "main"; token.is_required = True; token.is_locked = None; token.default = ""; token.restore = ""; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "mtf"; token.key_prefix = "--"; token.label = "MTF file"; token.help = "File contains the MTF (modulation transfer function) of the detector used. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "parameters"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "fsc_adj"; token.key_prefix = "--"; token.label = "Apply FSC-based low-pass filter"; token.help = "Applies an FSC-based low-pass filter to the merged volume before the B-factor estimation. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "B_enhance"; token.key_prefix = "--"; token.label = "B-factor enhancement"; token.help = "-1.0: B-factor is not applied; 0.0: program estimates B-factor from options. B_start (usually 10 Angstrom) to the resolution determined by FSC143; 128.0: program use the given value 128.0 [A^2] to enhance map. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "0.0"; token.restore = "0.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "fl"; token.key_prefix = "--"; token.label = "Low-pass filter frequency [1/Pixels]"; token.help = "0.0: low-pass filter to resolution; A value > 0.5: low-pass filter to the value in Angstrom; A value > 0.0 and < 0.5: low-pass filter to the value in absolute frequency; -1.0: no low-pass filter. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "0.0"; token.restore = "0.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "aa"; token.key_prefix = "--"; token.label = "Low-pass filter fall-off [1/Pixels]"; token.help = "Low-pass filter fall-off. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "0.1"; token.restore = "0.1"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "mask"; token.key_prefix = "--"; token.label = "User-provided mask"; token.help = "Path to user-provided mask. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "output"; token.key_prefix = "--"; token.label = "Output file"; token.help = "Output file name. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "vol_postrefine_masked.hdf"; token.restore = "vol_postrefine_masked.hdf"; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "pixel_size"; token.key_prefix = "--"; token.label = "Pixel size [A]"; token.help = "Pixel size in A. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1.0"; token.restore = "1.0"; token.type = "apix"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "B_start"; token.key_prefix = "--"; token.label = "B-factor lower limit [A]"; token.help = "Lower limit for B-factor estimation. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "10.0"; token.restore = "10.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "B_stop"; token.key_prefix = "--"; token.label = "B-factor higher limit [A]"; token.help = "Higher limit for B-factor estimation. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "0.0"; token.restore = "0.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "do_adaptive_mask"; token.key_prefix = "--"; token.label = "Apply adaptive mask"; token.help = "Program creates mask adaptively with given density threshold. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
# Fix: mask_threshold is a scalar density threshold (default "0.02"), so its GUI
# widget type must be "float" like the sibling threshold options, not "image"
# (which would present a file-selector for a numeric value).
token = SXcmd_token(); token.key_base = "mask_threshold"; token.key_prefix = "--"; token.label = "Adaptive mask threshold"; token.help = "Density threshold for creating adaptive surface mask. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "0.02"; token.restore = "0.02"; token.type = "float"; sxcmd.token_list.append(token)
# NOTE(review): "consine_edge" is misspelled, but the key_base must match the
# actual sxprocess.py command-line flag, so it is deliberately left unchanged.
token = SXcmd_token(); token.key_base = "consine_edge"; token.key_prefix = "--"; token.label = "Cosine edge width [Pixels]"; token.help = "Width of cosine transition area for soft-edge masking. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "6.0"; token.restore = "6.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "dilation"; token.key_prefix = "--"; token.label = "surface dilation size [Pixels]"; token.help = "Size of surface dilation or erosion. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "3.0"; token.restore = "3.0"; token.type = "float"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# -- sxgui_meridien: interactive assessment GUI for sxmeridien refinement
# outputs; takes no tokens and is launched locally (not submittable) --
sxcmd = SXcmd()
sxcmd.name = "sxgui_meridien"
sxcmd.subname = ""
sxcmd.mode = ""
sxcmd.label = "3D Refinement Assessment"
sxcmd.short_info = "GUI tool to assess 3D Refinement based on outputs of sxmeridien."
sxcmd.mpi_support = False
sxcmd.mpi_add_flag = False
sxcmd.category = "sxc_meridien"
sxcmd.role = "sxr_pipe"
sxcmd.is_submittable = False
sxcmd_list.append(sxcmd)
# -- e2display (MERIDIEN category): generic image/volume/plot viewer, run locally --
sxcmd = SXcmd()
sxcmd.name = "e2display"
sxcmd.subname = ""
sxcmd.mode = ""
sxcmd.label = "Display Data"
sxcmd.short_info = "Displays images, volumes, or 1D plots."
sxcmd.mpi_support = False
sxcmd.mpi_add_flag = False
sxcmd.category = "sxc_meridien"
sxcmd.role = "sxr_util"
sxcmd.is_submittable = False
# Token table; tuple fields mirror the attribute-assignment order used in this file.
for (key_base, key_prefix, label, help_text, group, is_required, is_locked,
     default, restore, token_type) in [
    ("input_data_list", "", "Input files", "List of input images, volumes, plots. Wild cards (e.g *) can be used to select a list of files. Not recommended when the list is too large. ", "main", False, False, "none", "none", "any_file_list"),
    ("singleimage", "--", "Single image view", "Displays a stack in a single image view: ", "advanced", False, False, False, False, "bool"),
    ("fullrange", "--", "Use full range of pixel values", "Instead of default auto-contrast, use full range of pixel values for the display of particles stacks and 2D images, ", "advanced", False, False, False, False, "bool"),
    ("verbose", "--", "Verbose", "Accepted values 0-9. ", "advanced", False, False, "0", "0", "int"),
]:
    token = SXcmd_token()
    token.key_base = key_base
    token.key_prefix = key_prefix
    token.label = label
    token.help = help_text
    token.group = group
    token.is_required = is_required
    token.is_locked = is_locked
    token.default = default
    token.restore = restore
    token.type = token_type
    sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# -- sxprocess --adaptive_mask (MERIDIEN category): duplicate of the VIPER
# registration so the same utility appears under this GUI category too --
sxcmd = SXcmd(); sxcmd.name = "sxprocess"; sxcmd.subname = ""; sxcmd.mode = "adaptive_mask"; sxcmd.label = "Adaptive 3D Mask"; sxcmd.short_info = "Create soft-edged 3D mask from reference volume. "; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_meridien"; sxcmd.role = "sxr_util"; sxcmd.is_submittable = True
# Mode selector token: required and locked so the GUI always passes --adaptive_mask.
token = SXcmd_token(); token.key_base = "adaptive_mask"; token.key_prefix = "--"; token.label = "Create soft-edged 3D mask"; token.help = "Create soft-edged 3D mask from reference volume. "; token.group = "main"; token.is_required = True; token.is_locked = True; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
# NOTE(review): is_locked is None here while most tokens use False — presumably
# treated as falsy downstream; verify against the SXcmd_token consumer.
token = SXcmd_token(); token.key_base = "input_volume"; token.key_prefix = ""; token.label = "Input volume"; token.help = "Input reference volume"; token.group = "main"; token.is_required = True; token.is_locked = None; token.default = ""; token.restore = ""; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "output_mask3D"; token.key_prefix = ""; token.label = "Output mask"; token.help = "Output 3D mask"; token.group = "main"; token.is_required = True; token.is_locked = None; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
# Masking parameters: nsigma-based thresholding is used unless an explicit
# --threshold above the -9999.0 sentinel is supplied.
token = SXcmd_token(); token.key_base = "nsigma"; token.key_prefix = "--"; token.label = "Density standard deviations threshold"; token.help = "Defines the threshold used to find the main volume within the data. All voxels with density <= mean + nsigma standard deviations will be included into the main volume. This option will not be used if the option threshold is larger than -9999.0. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1.0"; token.restore = "1.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "threshold"; token.key_prefix = "--"; token.label = "Binarization threshold"; token.help = "Below this value the data is assumed to not belong to the main volume. With the value lower than the default, the option will be ignored and the mask will be set according to nsigma. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "-9999.0"; token.restore = "-9999.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "ndilation"; token.key_prefix = "--"; token.label = "Mask extension cycles"; token.help = "The initial mask will be extended this number of cycles. To keep the size of the main volume, set this to kernel_size/2 "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "3"; token.restore = "3"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "kernel_size"; token.key_prefix = "--"; token.label = "Gaussian kernel size [Pixels]"; token.help = "Size of the gaussian kernel used to smooth the binary mask. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "11"; token.restore = "11"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "gauss_standard_dev"; token.key_prefix = "--"; token.label = "Kernel standard deviation [Pixels]"; token.help = "Standard deviation used in the construction of the gaussian smoothing of the mask. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "9"; token.restore = "9"; token.type = "int"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# sxprocess --angular_distribution: builds the GUI command entry that converts an
# alignment-parameter file into a 3D angular-distribution representation.
# Fix: corrected the user-visible typo "UCFS Chimera" -> "UCSF Chimera" in the
# short_info and help strings (UCSF is the correct name of the Chimera vendor).
sxcmd = SXcmd(); sxcmd.name = "sxprocess"; sxcmd.subname = ""; sxcmd.mode = "angular_distribution"; sxcmd.label = "Angular Distribution"; sxcmd.short_info = "Create angular distribution file containing a 3D representation of the given angular distribution. It can be viewed with UCSF Chimera. "; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_meridien"; sxcmd.role = "sxr_util"; sxcmd.is_submittable = True
token = SXcmd_token(); token.key_base = "angular_distribution"; token.key_prefix = "--"; token.label = "Create angular distribution file"; token.help = "Create angular distribution file containing a 3D representation of the given angular distribution. It can be viewed with UCSF Chimera. "; token.group = "main"; token.is_required = True; token.is_locked = True; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "inputfile"; token.key_prefix = ""; token.label = "Alignment Parameter file"; token.help = "Alignment Parameter file created by a previous 3D reconstruction step (e.g. sxmeridien.py)"; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "any_file"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "pixel_size"; token.key_prefix = "--"; token.label = "Pixel size [A]"; token.help = "Pixel size in A. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1.0"; token.restore = "1.0"; token.type = "apix"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "round_digit"; token.key_prefix = "--"; token.label = "Number precision"; token.help = "Decimal numbers will be rounded to this number of decimal points. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "5"; token.restore = "5"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "box_size"; token.key_prefix = "--"; token.label = "Box size [Pixels]"; token.help = "Box size in pixel used for calculating the center of the particle. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "500"; token.restore = "500"; token.type = "box"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "particle_radius"; token.key_prefix = "--"; token.label = "Particle radius [Pixels]"; token.help = "Used for the representation in Chimera. Defines where the cylinders representing the histogram must start. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "175"; token.restore = "175"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "cylinder_width"; token.key_prefix = "--"; token.label = "Cylinder width"; token.help = "Used for the representation in Chimera. This will define the width of the cylinders representing the histogram."; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "cylinder_length"; token.key_prefix = "--"; token.label = "Cylinder length"; token.help = "Used for the representation in Chimera. This will define the relative size of the cylinders representing the histogram. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "10000"; token.restore = "10000"; token.type = "int"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# sx3dvariability --symmetrize: GUI command entry for the symmetrisation
# preprocessing step that precedes 3D variability estimation.
# Fix: corrected the user-visible typo "must containt" -> "must contain" in the
# prj_stack help text.
sxcmd = SXcmd(); sxcmd.name = "sx3dvariability"; sxcmd.subname = ""; sxcmd.mode = "symmetrize"; sxcmd.label = "3D Variability Preprocess"; sxcmd.short_info = "Please skip this preparation step if the structure is asymmetrical (i.e. c1), since it is required only when the structure has internal symmetry. "; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_sort3d"; sxcmd.role = "sxr_pipe"; sxcmd.is_submittable = True
token = SXcmd_token(); token.key_base = "symmetrize"; token.key_prefix = "--"; token.label = "Symmetrise input stack"; token.help = "Please skip this preparation step if the structure is asymmetrical (i.e. c1), since it is required only when the structure has internal symmetry. "; token.group = "main"; token.is_required = True; token.is_locked = True; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "prj_stack"; token.key_prefix = ""; token.label = "Input image stack"; token.help = "The images must contain the 3D orientation parameters in the header and optionally CTF information. The output image stack is bdb:sdata. Please use it as an input image stack of sx3dvariability."; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "sym"; token.key_prefix = "--"; token.label = "Point-group symmetry"; token.help = "Only required if the structure has symmetry higher than c1. Notice this step can be run with only one CPU and there is no MPI version for it. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "c1"; token.restore = "c1"; token.type = "sym"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# sx3dvariability (main mode): GUI command entry for 3D variability estimation
# from aligned projection images.
# Fixes (user-visible help/label text only, no behavioral option changes):
#   - "must containt"            -> "must contain"
#   - "the a CTF correction"     -> "a CTF correction"
#   - "value large than 1.0"     -> "value larger than 1.0"
#   - label "User smaller window size" -> "Use smaller window size"
sxcmd = SXcmd(); sxcmd.name = "sx3dvariability"; sxcmd.subname = ""; sxcmd.mode = ""; sxcmd.label = "3D Variability Estimation"; sxcmd.short_info = "Calculate 3D variability using a set of aligned projection images as an input."; sxcmd.mpi_support = True; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_sort3d"; sxcmd.role = "sxr_pipe"; sxcmd.is_submittable = True
token = SXcmd_token(); token.key_base = "prj_stack"; token.key_prefix = ""; token.label = "Input image stack"; token.help = "The images must contain the 3D orientation parameters in the header and optionally CTF information. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "ave3D"; token.key_prefix = "--"; token.label = "Output 3D average"; token.help = "3D reconstruction computed from projections averaged within respective angular neighbourhood. It should be used to assess the resolvability and possible artifacts of the variability map. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "var3D"; token.key_prefix = "--"; token.label = "Output 3D variability"; token.help = "It creates a volume containing, for each voxel, a measure of the variability in the dataset. Careful, time consuming! "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "img_per_grp"; token.key_prefix = "--"; token.label = "Number of projections"; token.help = "Images from the angular neighbourhood that will be used to estimate 2D variance for each projection data. The larger the number the less noisy the estimate, but the lower the resolution. Usage of large number also results in rotational artifacts in variances that will be visible in 3D variability volume. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "10"; token.restore = "10"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "sym"; token.key_prefix = "--"; token.label = "Point-group symmetry"; token.help = "Only required if the structure has symmetry higher than c1. Notice this step can be run with only one CPU and there is no MPI version for it. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "c1"; token.restore = "c1"; token.type = "sym"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "CTF"; token.key_prefix = "--"; token.label = "Use CTF"; token.help = "If set to true, a CTF correction will be applied using the parameters found in the image headers. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "fl"; token.key_prefix = "--"; token.label = "Low-pass filter frequency [1/Pixel]"; token.help = "Filter to be applied to the images prior to variability calculation. Specify with absolute frequency. By default, no filtering. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "0.0"; token.restore = "0.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "aa"; token.key_prefix = "--"; token.label = "Low-pass filter fall-off [1/Pixel]"; token.help = "Filter to be applied to the images prior to variability calculation. Specify with absolute frequency. By default, no filtering. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "0.0"; token.restore = "0.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "no_norm"; token.key_prefix = "--"; token.label = "Apply normalization"; token.help = "Indicate if normalization should be applied or not. By default, apply normalization. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "npad"; token.key_prefix = "--"; token.label = "Image padding factor"; token.help = "The images are padded to achieve the original size times this option. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "2"; token.restore = "2"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "VAR"; token.key_prefix = "--"; token.label = "Stack on input consists of 2D variances"; token.help = ""; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "decimate"; token.key_prefix = "--"; token.label = "Image decimate factor"; token.help = "Reduce images by this factor and change the pixel size. Specify a value larger than 1.0 to expand images, and less than 1.0 to shrink them. By default, it does not change size of images"; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "1.0"; token.restore = "1.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "window"; token.key_prefix = "--"; token.label = "Use smaller window size [Pixels]"; token.help = "Window (or clip) images using the specified size without changing pixel size. By default, use the original particle image size. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0"; token.restore = "0"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "nvec"; token.key_prefix = "--"; token.label = "Number of eigenvectors"; token.help = "By default, no PCA calculated. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0"; token.restore = "0"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "VERBOSE"; token.key_prefix = "--"; token.label = "Verbose"; token.help = "Useful for debugging. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "ave2D"; token.key_prefix = "--"; token.label = "Output 2D averages"; token.help = "Useful for debugging. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "var2D"; token.key_prefix = "--"; token.label = "Output 2D variances"; token.help = "Useful for debugging. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "output"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# sxrsort3d: GUI command entry for reproducible 3D clustering of a heterogeneous
# dataset whose 3D reconstruction parameters were determined previously.
# Fixes (user-visible help text only, no behavioral option changes):
#   - "SHPIRE GUI"        -> "SPHIRE GUI"
#   - "per a group" (x2)  -> "per group"
#   - "cross-correlaton"  -> "cross-correlation"
sxcmd = SXcmd(); sxcmd.name = "sxrsort3d"; sxcmd.subname = ""; sxcmd.mode = ""; sxcmd.label = "3D Clustering - RSORT3D"; sxcmd.short_info = "Reproducible 3D Clustering of heterogeneous dataset. Sort out 3D heterogeneity of 2D data whose 3D reconstruction parameters have been determined already."; sxcmd.mpi_support = True; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_sort3d"; sxcmd.role = "sxr_pipe"; sxcmd.is_submittable = True
token = SXcmd_token(); token.key_base = "refinement_method"; token.key_prefix = "--"; token.label = "Input 3D refinement method"; token.help = "Valid values are 'SPARX' and 'relion'. Currently, SPHIRE GUI (sxgui) supports only 'SPARX'. "; token.group = "main"; token.is_required = True; token.is_locked = True; token.default = ""; token.restore = "SPARX"; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "refinement_dir"; token.key_prefix = "--"; token.label = "Input 3D refinement directory"; token.help = "Usually the master output directory of sxmeridien. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "directory"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "masterdir"; token.key_prefix = "--"; token.label = "Output directory"; token.help = "The master output directory for sorting. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "niter_for_sorting"; token.key_prefix = "--"; token.label = "3D refinement iteration"; token.help = "Specify an iteration number of 3D refinement where the 3D alignment parameters should be extracted for this sorting. By default, it uses iteration achieved best resolution. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "-1"; token.restore = "-1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "mask3D"; token.key_prefix = "--"; token.label = "3D mask"; token.help = "File path of the global 3D mask for clustering. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "focus"; token.key_prefix = "--"; token.label = "Focus 3D mask"; token.help = "File path of a binary 3D mask for focused clustering. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "radius"; token.key_prefix = "--"; token.label = "Outer radius for rotational correlation [Pixels]"; token.help = "Particle radius in pixel for rotational correlation. The value must be smaller than half the box size. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "-1"; token.restore = "-1"; token.type = "radius"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "sym"; token.key_prefix = "--"; token.label = "Point-group symmetry"; token.help = "Point group symmetry of the structure. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "c1"; token.restore = "c1"; token.type = "sym"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "number_of_images_per_group"; token.key_prefix = "--"; token.label = "Images per group"; token.help = "The number of images per group. This value is critical for successful 3D clustering. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1000"; token.restore = "1000"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "smallest_group"; token.key_prefix = "--"; token.label = "Smallest group size"; token.help = "Minimum number of members for being identified as a group. This value must be smaller than the number of images per group (number_of_images_per_group). "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "500"; token.restore = "500"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "nxinit"; token.key_prefix = "--"; token.label = "Initial image size for sorting [Pixels]"; token.help = "If it is necessary to speed up the processing time, set a non-zero positive integer to this option. Then, the program will reduce image size of original data by resampling to the specified size. By default, program determines the value from resolution. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "-1"; token.restore = "-1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "low_pass_filter"; token.key_prefix = "--"; token.label = "Low-pass filter frequency [1/Pixel]"; token.help = "Absolute frequency cutoff of the low-pass filter used on the original image size for the 3D sorting. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "-1.0"; token.restore = "-1.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "Kmeans_lpf"; token.key_prefix = "--"; token.label = "Low-pass filter method for K-means"; token.help = "Low-pass filter method for K-means clustering. Valid values are 'adaptive', 'max', 'min', 'adhoc', and 'avg'. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "adaptive"; token.restore = "adaptive"; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "nindependent"; token.key_prefix = "--"; token.label = "Independent runs"; token.help = "Number of independent runs for Equal Sized K-means clustering. The value must be an odd number larger than 2. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "3"; token.restore = "3"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "noctf"; token.key_prefix = "--"; token.label = "Apply full CTF correction"; token.help = "Use this option if full CTF correction should not be applied during the 3D clustering. By default, the program will do full CTF correction. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "PWadjustment"; token.key_prefix = "--"; token.label = "Reference power spectrum file path"; token.help = "Path of text file containing 1D reference power spectrum of a PDB structure or EM map. The power spectrum will be used as reference to adjust the power spectra of clustered volumes. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "parameters"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "interpolation"; token.key_prefix = "--"; token.label = "3D reconstruction interpolation method"; token.help = "Interpolation method for 3D reconstruction. Valid values are 'trl' and '4nn'. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "4nn"; token.restore = "4nn"; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "comparison_method"; token.key_prefix = "--"; token.label = "Comparison method"; token.help = "Similarity measurement for the comparison between reprojected reference images and particle images. Valid values are 'cross' (cross-correlation coefficients) and 'eucd' (Euclidean distance). "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "cross"; token.restore = "cross"; token.type = "string"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# sxmeridien --ctref ("Subset Refinement"): continue a 3D refinement from a
# previous local/exhaustive state, on the full dataset or a selected subset.
# Rewritten data-driven: the per-token attribute assignments are expressed as
# one spec tuple per token and applied in a loop.  The resulting SXcmd/token
# objects and their attribute values are identical to the hand-unrolled form.
sxcmd = SXcmd()
sxcmd.name = "sxmeridien"
sxcmd.subname = ""
sxcmd.mode = "ctref"
sxcmd.label = "Subset Refinement"
sxcmd.short_info = "Continue refinement from a local/exhaustive state using the full dataset or selected subset."
sxcmd.mpi_support = True
sxcmd.mpi_add_flag = False
sxcmd.category = "sxc_sort3d"
sxcmd.role = "sxr_pipe"
sxcmd.is_submittable = True
# Spec order: (key_base, key_prefix, label, help, group, is_required, is_locked,
#              default, restore, type)
_ctref_token_specs = [
	("ctref", "--", "Continuation refinement", "Continue refinement from a local/exhaustive state using the full dataset or selected subset.", "main", True, True, True, True, "bool"),
	("output_directory", "", "Output directory", "The results will be written here. This directory will be created automatically if it does not exist. ", "main", False, False, "none", "none", "output_continue"),
	("ctref_subset", "--", "Selection text file", "Selection text file that contains indexes of the data subset. Typically, Cluster#.txt created by sxrsort3d_new (e.g. Cluster1.txt). ", "main", False, False, "none", "none", "parameters"),
	("ctref_oldrefdir", "--", "3D refinement directory", "The master output directory of 3D refinement run which you wish to continue. ", "main", False, False, "none", "none", "directory"),
	("ctref_iter", "--", "Restarting iteration", "The iteration from which 3D refinement should be continued. ", "main", False, False, "-1", "-1", "int"),
	("ctref_initvol", "--", "Initial 3D reference for continuation", "User-provided 3D reference for continuation run. Effective only for continuation refinement. ", "main", False, False, "none", "none", "image"),
	("ctref_orgstack", "--", "BDB stack for continuation ", "BDB stack for continuation with xform.projection parameters written in the headers. If this option is set, the program ignores the ctref_oldrefdir option. Effective only for continuation refinement. ", "main", False, False, "none", "none", "bdb"),
	("ctref_smearing", "--", "Smear usage of initial 3D reconstruction", "-1: Use optimal xform.projection parameters only and norm = 1.0; 0: Use all smears; >1: Use option specified number of smears. Effective only for continuation refinement. ", "main", False, False, "-1", "-1", "int"),
	("ctref_an", "--", "Initial angular neighborhood [Degrees]", "Angular neighborhood for local search during initial 3D reconstruction of continuation refinement. Effective only for continuation refinement. ", "main", False, False, "-1.0", "-1.0", "float"),
	("memory_per_node", "--", "Memory per node [GB]", "User provided information about memory per node in GB (NOT per CPU). By default, it uses 2GB * (number of CPUs per node) ", "main", False, False, "-1.0", "-1.0", "float"),
	("radius", "--", "Particle radius [Pixels]", "Outer radius [in pixels] of particles < int(nx/2)-1 ", "advanced", False, False, "-1", "-1", "radius"),
	("mask3D", "--", "3D mask", "Soft mask for the volume. If not given, a hard sphere of radius boxsize/2-1 will be used. ", "advanced", False, False, "none", "none", "image"),
	("symmetry", "--", "Point-group symmetry", "Symmetry of the refined structure. Acceptable values are: cn, dn, where n is multiplicity. ", "advanced", False, False, "c1", "c1", "sym"),
	("inires", "--", "Starting resolution [A]", "Resolution used to start the refinement. ", "advanced", False, False, "25.0", "25.0", "float"),
	("delta", "--", "Initial angular sampling step [Degrees]", "Initial angular sampling step. ", "advanced", False, False, "7.5", "7.5", "float"),
	("xr", "--", "Search range [Pixels]", "Range for translation search in both directions. Search is +/-xr. It can be fractional ", "advanced", False, False, "5.0", "5.0", "float"),
	("ts", "--", "Search step size [Pixels]", "Step size of translation search in both directions. Search is within a circle of radius xr on a grid with steps ts. It can be fractional. ", "advanced", False, False, "1.0", "1.0", "float"),
	("shake", "--", "Shake", "Shake ", "advanced", False, False, "0.5", "0.5", "float"),
	("small_memory", "--", "Keep data in memory", "Indicate if data should be kept in memory or not. By default, data will be kept in memory. ", "advanced", False, False, True, True, "bool"),
	("ref_a", "--", "Projection generation method", "Method for generating the quasi-uniformly distributed projection directions. S- Saff algorithm, or P - Penczek 1994 algorithm. ", "advanced", False, False, "S", "S", "string"),
	("ccfpercentage", "--", "Correlation peaks to be included [%]", "Percentage of correlation peaks to be included. 0.0 corresponds to hard matching. ", "advanced", False, False, "99.9", "99.9", "float"),
	("nonorm", "--", "Apply image norm correction", "Indicate if image norm correction should be applied or not. By default, apply image norm correction. ", "advanced", False, False, True, True, "bool"),
]
for _spec in _ctref_token_specs:
	token = SXcmd_token()
	(token.key_base, token.key_prefix, token.label, token.help, token.group,
	 token.is_required, token.is_locked, token.default, token.restore,
	 token.type) = _spec
	sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# sxprocess --postprocess ("Sharpening"): GUI command entry that sharpens a map
# by applying a B-factor estimated from the two unfiltered half-volumes produced
# by sxmeridien.  NOTE(review): this command's token list continues past the end
# of this chunk (no sxcmd_list.append is visible here); the remaining tokens and
# the registration follow below.
sxcmd = SXcmd(); sxcmd.name = "sxprocess"; sxcmd.subname = ""; sxcmd.mode = "postprocess"; sxcmd.label = "Sharpening"; sxcmd.short_info = "B-factor is estimated from two unfiltered 3D volumes produced by meridien. "; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_sort3d"; sxcmd.role = "sxr_pipe"; sxcmd.is_submittable = True
token = SXcmd_token(); token.key_base = "postprocess"; token.key_prefix = "--"; token.label = "Apply B-factor to sharpen the power spectrum"; token.help = "B-factor is estimated from two unfiltered 3D volumes produced by meridien. "; token.group = "main"; token.is_required = True; token.is_locked = True; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "firstvolume"; token.key_prefix = ""; token.label = "First unfiltered half-volume "; token.help = "Generated by sxmeridien"; token.group = "main"; token.is_required = True; token.is_locked = None; token.default = ""; token.restore = ""; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "secondvolume"; token.key_prefix = ""; token.label = "Second unfiltered half-volume "; token.help = "Generated by sxmeridien"; token.group = "main"; token.is_required = True; token.is_locked = None; token.default = ""; token.restore = ""; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "mtf"; token.key_prefix = "--"; token.label = "MTF file"; token.help = "File contains the MTF (modulation transfer function) of the detector used. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "parameters"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "fsc_adj"; token.key_prefix = "--"; token.label = "Apply FSC-based low-pass filter"; token.help = "Applies an FSC-based low-pass filter to the merged volume before the B-factor estimation. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "B_enhance"; token.key_prefix = "--"; token.label = "B-factor enhancement"; token.help = "-1.0: B-factor is not applied; 0.0: program estimates B-factor from options. B_start (usually 10 Angstrom) to the resolution determined by FSC143; 128.0: program use the given value 128.0 [A^2] to enhance map. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "0.0"; token.restore = "0.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "fl"; token.key_prefix = "--"; token.label = "Low-pass filter frequency [1/Pixels]"; token.help = "0.0: low-pass filter to resolution; A value > 0.5: low-pass filter to the value in Angstrom; A value > 0.0 and < 0.5: low-pass filter to the value in absolute frequency; -1.0: no low-pass filter. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "0.0"; token.restore = "0.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "aa"; token.key_prefix = "--"; token.label = "Low-pass filter fall-off [1/Pixels]"; token.help = "Low-pass filter fall-off. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "0.1"; token.restore = "0.1"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "mask"; token.key_prefix = "--"; token.label = "User-provided mask"; token.help = "Path to user-provided mask. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "output"; token.key_prefix = "--"; token.label = "Output file"; token.help = "Output file name. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "vol_postrefine_masked.hdf"; token.restore = "vol_postrefine_masked.hdf"; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "pixel_size"; token.key_prefix = "--"; token.label = "Pixel size [A]"; token.help = "Pixel size in A. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1.0"; token.restore = "1.0"; token.type = "apix"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "B_start"; token.key_prefix = "--"; token.label = "B-factor lower limit [A]"; token.help = "Lower limit for B-factor estimation. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "10.0"; token.restore = "10.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "B_stop"; token.key_prefix = "--"; token.label = "B-factor higher limit [A]"; token.help = "Higher limit for B-factor estimation. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "0.0"; token.restore = "0.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "do_adaptive_mask"; token.key_prefix = "--"; token.label = "Apply adaptive mask"; token.help = "Program creates mask adaptively with given density threshold. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "mask_threshold"; token.key_prefix = "--"; token.label = "Adaptive mask threshold"; token.help = "Density threshold for creating adaptive surface mask. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "0.02"; token.restore = "0.02"; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "consine_edge"; token.key_prefix = "--"; token.label = "Cosine edge width [Pixels]"; token.help = "Width of cosine transition area for soft-edge masking. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "6.0"; token.restore = "6.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "dilation"; token.key_prefix = "--"; token.label = "surface dilation size [Pixels]"; token.help = "Size of surface dilation or erosion. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "3.0"; token.restore = "3.0"; token.type = "float"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
sxcmd = SXcmd(); sxcmd.name = "sxsort3d"; sxcmd.subname = ""; sxcmd.mode = ""; sxcmd.label = "3D Clustering - SORT3D"; sxcmd.short_info = "Sort 3D heterogeneity based on the reproducible members of K-means and Equal K-means classification. It runs after 3D refinement where the alignment parameters are determined."; sxcmd.mpi_support = True; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_sort3d"; sxcmd.role = "sxr_alt"; sxcmd.is_submittable = True
token = SXcmd_token(); token.key_base = "stack"; token.key_prefix = ""; token.label = "Input images stack"; token.help = ""; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "outdir"; token.key_prefix = ""; token.label = "Output directory"; token.help = "There is a log.txt that describes the sequences of computations in the program. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "mask"; token.key_prefix = ""; token.label = "3D mask"; token.help = ""; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "focus"; token.key_prefix = "--"; token.label = "Focus 3D mask"; token.help = "Mask used for focused clustering "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "ir"; token.key_prefix = "--"; token.label = "Inner radius for rotational correlation [Pixels]"; token.help = "Must be bigger than 1. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "radius"; token.key_prefix = "--"; token.label = "Outer radius for rotational correlation [Pixels]"; token.help = "Must be smaller than half the box size. Please set to the radius of the particle. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "-1"; token.restore = "-1"; token.type = "radius"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "maxit"; token.key_prefix = "--"; token.label = "Maximum iterations"; token.help = ""; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "25"; token.restore = "25"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "rs"; token.key_prefix = "--"; token.label = "Step between rings in rotational correlation"; token.help = "Must be bigger than 0. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "xr"; token.key_prefix = "--"; token.label = "X search range [Pixels]"; token.help = "The translational search range in the x direction will take place in a +/xr range. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "yr"; token.key_prefix = "--"; token.label = "Y search range [Pixels]"; token.help = "The translational search range in the y direction. If omitted it will be set as xr. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "-1"; token.restore = "-1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "ts"; token.key_prefix = "--"; token.label = "Translational search step [Pixels]"; token.help = "The search will be performed in -xr, -xr+ts, 0, xr-ts, xr, can be fractional. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0.25"; token.restore = "0.25"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "delta"; token.key_prefix = "--"; token.label = "Angular step for projections [Degrees]"; token.help = "Angular step for projections"; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "'2'"; token.restore = "'2'"; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "an"; token.key_prefix = "--"; token.label = "Local angular search width [Degrees]"; token.help = "This defines the neighbourhood where the local angular search will be performed. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "'-1'"; token.restore = "'-1'"; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "center"; token.key_prefix = "--"; token.label = "Centering method"; token.help = "0 - if you do not want the volume to be centered, 1 - center the volume using the center of gravity. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0"; token.restore = "0"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "nassign"; token.key_prefix = "--"; token.label = "Number of reassignment iterations"; token.help = "Performed for each angular step. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "nrefine"; token.key_prefix = "--"; token.label = "Number of alignment iterations"; token.help = "Performed for each angular step. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0"; token.restore = "0"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "CTF"; token.key_prefix = "--"; token.label = "Use CTF"; token.help = "Do a full CTF correction during the alignment. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "stoprnct"; token.key_prefix = "--"; token.label = "Assignment convergence threshold [%]"; token.help = "Used to asses convergence of the run. It is the minimum percentage of assignment change required to stop the run. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "3.0"; token.restore = "3.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "sym"; token.key_prefix = "--"; token.label = "Point-group symmetry"; token.help = ""; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "c1"; token.restore = "c1"; token.type = "sym"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "function"; token.key_prefix = "--"; token.label = "Reference preparation function"; token.help = "Function used to prepare the reference volume. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "do_volume_mrk02"; token.restore = "do_volume_mrk02"; token.type = "function"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "independent"; token.key_prefix = "--"; token.label = "Number of independent runs"; token.help = "Number of independent equal-Kmeans"; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "3"; token.restore = "3"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "number_of_images_per_group"; token.key_prefix = "--"; token.label = "Images per group"; token.help = "Critical number of images per group, defined by user. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1000"; token.restore = "1000"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "low_pass_filter"; token.key_prefix = "--"; token.label = "Low-pass filter frequency [1/Pixels]"; token.help = "Low-pass filter used for the 3D sorting on the original image size. Specify with absolute frequency. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "-1.0"; token.restore = "-1.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "nxinit"; token.key_prefix = "--"; token.label = "Initial image size for sorting"; token.help = ""; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "64"; token.restore = "64"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "unaccounted"; token.key_prefix = "--"; token.label = "Reconstruct unaccounted images"; token.help = ""; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "seed"; token.key_prefix = "--"; token.label = "Seed"; token.help = "Seed used for the initial random assignment for EQ Kmeans. The program generates a random integer by default. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "-1"; token.restore = "-1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "smallest_group"; token.key_prefix = "--"; token.label = "Smallest group size"; token.help = "Minimum members for identified group. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "500"; token.restore = "500"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "sausage"; token.key_prefix = "--"; token.label = "Use sausage filter"; token.help = ""; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "chunk0"; token.key_prefix = "--"; token.label = "Chunk file name for 1st subset"; token.help = "Name of chunk file containing particle IDs of 1st subset (chunk0) for computing margin of error. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "parameters"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "chunk1"; token.key_prefix = "--"; token.label = "Chunk file name for 2nd subset"; token.help = "Name of chunk file containing particle IDs of 2nd subset (chunk0) for computing margin of error. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "parameters"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "PWadjustment"; token.key_prefix = "--"; token.label = "Power spectrum reference"; token.help = "Text file containing a 1D reference power spectrum. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "parameters"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "protein_shape"; token.key_prefix = "--"; token.label = "Protein Shape"; token.help = "It defines protein preferred orientation angles. 'g' is for globular proteins and 'f' is for filament proteins. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "'g'"; token.restore = "'g'"; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "upscale"; token.key_prefix = "--"; token.label = "Power spectrum adjustment strength"; token.help = "This parameters adjusts how strongly the power spectrum of the volume should be modified to match the reference. A value of 1 brings the volume's power spectrum completely to the reference, while a value of 0 means no modification. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0.5"; token.restore = "0.5"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "wn"; token.key_prefix = "--"; token.label = "Target image size [Pixels]"; token.help = "If different than 0, then the images will be rescaled to fit this size. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0"; token.restore = "0"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "interpolation"; token.key_prefix = "--"; token.label = "3D interpolation method"; token.help = "Method interpolation in 3D. Options are tr1 or 4nn. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "'4nn'"; token.restore = "'4nn'"; token.type = "string"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
sxcmd = SXcmd(); sxcmd.name = "e2display"; sxcmd.subname = ""; sxcmd.mode = ""; sxcmd.label = "Display Data"; sxcmd.short_info = "Displays images, volumes, or 1D plots."; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_sort3d"; sxcmd.role = "sxr_util"; sxcmd.is_submittable = False
token = SXcmd_token(); token.key_base = "input_data_list"; token.key_prefix = ""; token.label = "Input files"; token.help = "List of input images, volumes, plots. Wild cards (e.g *) can be used to select a list of files. Not recommended when the list is too large. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "any_file_list"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "singleimage"; token.key_prefix = "--"; token.label = "Single image view"; token.help = "Displays a stack in a single image view: "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "fullrange"; token.key_prefix = "--"; token.label = "Use full range of pixel values"; token.help = "Instead of default auto-contrast, use full range of pixel values for the display of particles stacks and 2D images, "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "verbose"; token.key_prefix = "--"; token.label = "Verbose"; token.help = "Accepted values 0-9. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0"; token.restore = "0"; token.type = "int"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
sxcmd = SXcmd(); sxcmd.name = "sxprocess"; sxcmd.subname = ""; sxcmd.mode = "adaptive_mask"; sxcmd.label = "Adaptive 3D Mask"; sxcmd.short_info = "Create soft-edged 3D mask from reference volume. "; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_sort3d"; sxcmd.role = "sxr_util"; sxcmd.is_submittable = True
token = SXcmd_token(); token.key_base = "adaptive_mask"; token.key_prefix = "--"; token.label = "Create soft-edged 3D mask"; token.help = "Create soft-edged 3D mask from reference volume. "; token.group = "main"; token.is_required = True; token.is_locked = True; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "input_volume"; token.key_prefix = ""; token.label = "Input volume"; token.help = "Input reference volume"; token.group = "main"; token.is_required = True; token.is_locked = None; token.default = ""; token.restore = ""; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "output_mask3D"; token.key_prefix = ""; token.label = "Output mask"; token.help = "Output 3D mask"; token.group = "main"; token.is_required = True; token.is_locked = None; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "nsigma"; token.key_prefix = "--"; token.label = "Density standard deviations threshold"; token.help = "Defines the threshold used to find the main volume within the data. All voxels with density <= mean + nsigma standard deviations will be included into the main volume. This option will not be used if the option threshold is larger than -9999.0. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1.0"; token.restore = "1.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "threshold"; token.key_prefix = "--"; token.label = "Binarization threshold"; token.help = "Below this value the data is assumed to not belong to the main volume. With the value lower than the default, the option will be ignored and the mask will be set according to nsigma. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "-9999.0"; token.restore = "-9999.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "ndilation"; token.key_prefix = "--"; token.label = "Mask extension cycles"; token.help = "The initial mask will be extended this number of cycles. To keep the size of the main volume, set this to kernel_size/2 "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "3"; token.restore = "3"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "kernel_size"; token.key_prefix = "--"; token.label = "Gaussian kernel size [Pixels]"; token.help = "Size of the gaussian kernel used to smooth the binary mask. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "11"; token.restore = "11"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "gauss_standard_dev"; token.key_prefix = "--"; token.label = "Kernel standard deviation [Pixels]"; token.help = "Standard deviation used in the construction of the gaussian smoothing of the mask. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "9"; token.restore = "9"; token.type = "int"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
sxcmd = SXcmd(); sxcmd.name = "sxprocess"; sxcmd.subname = ""; sxcmd.mode = "binary_mask"; sxcmd.label = "Binary 3D Mask"; sxcmd.short_info = "Create binary 3D mask from reference volume. "; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_sort3d"; sxcmd.role = "sxr_util"; sxcmd.is_submittable = True
token = SXcmd_token(); token.key_base = "binary_mask"; token.key_prefix = "--"; token.label = "Create binary 3D mask"; token.help = "Create binary 3D mask from reference volume. "; token.group = "main"; token.is_required = True; token.is_locked = True; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "input_volume"; token.key_prefix = ""; token.label = "Input volume"; token.help = "Input reference volume"; token.group = "main"; token.is_required = True; token.is_locked = None; token.default = ""; token.restore = ""; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "output_mask3D"; token.key_prefix = ""; token.label = "Output mask"; token.help = "Output 3D mask"; token.group = "main"; token.is_required = True; token.is_locked = None; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "bin_threshold"; token.key_prefix = "--"; token.label = "Binarization threshold"; token.help = "Below this value the data is assumed to not belong to the main volume. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "0.0"; token.restore = "0.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "ne"; token.key_prefix = "--"; token.label = "Erosion cycles"; token.help = "After initial binarization the volume is eroded to remove fragmented pieces of the volume. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "0"; token.restore = "0"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "nd"; token.key_prefix = "--"; token.label = "Dilation cycles"; token.help = "After erosing the binary volume is dilated back to smooth the surface and match the original size. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "0"; token.restore = "0"; token.type = "int"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
sxcmd = SXcmd(); sxcmd.name = "sxprocess"; sxcmd.subname = ""; sxcmd.mode = "angular_distribution"; sxcmd.label = "Angular Distribution"; sxcmd.short_info = "Create angular distribution file containing a 3D representation of the given angular distribution. It can be viewed with UCFS Chimera. "; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_sort3d"; sxcmd.role = "sxr_util"; sxcmd.is_submittable = True
token = SXcmd_token(); token.key_base = "angular_distribution"; token.key_prefix = "--"; token.label = "Create angular distribution file"; token.help = "Create angular distribution file containing a 3D representation of the given angular distribution. It can be viewed with UCFS Chimera. "; token.group = "main"; token.is_required = True; token.is_locked = True; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "inputfile"; token.key_prefix = ""; token.label = "Alignment Parameter file"; token.help = "Alignment Parameter file created by a previous 3D reconstruction step (e.g. sxmeridien.py)"; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "any_file"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "pixel_size"; token.key_prefix = "--"; token.label = "Pixel size [A]"; token.help = "Pixel size in A. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1.0"; token.restore = "1.0"; token.type = "apix"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "round_digit"; token.key_prefix = "--"; token.label = "Number precision"; token.help = "Decimal numbers will be rounded to this number of decimal points. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "5"; token.restore = "5"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "box_size"; token.key_prefix = "--"; token.label = "Box size [Pixels]"; token.help = "Box size in pixel used for calculating the center of the particle. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "500"; token.restore = "500"; token.type = "box"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "particle_radius"; token.key_prefix = "--"; token.label = "Particle radius [Pixels]"; token.help = "Used for the representation in Chimera. Defines where the cylinders representing the histogram must start. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "175"; token.restore = "175"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "cylinder_width"; token.key_prefix = "--"; token.label = "Cylinder width"; token.help = "Used for the representation in Chimera. This will define the width of the cylinders representing the histogram."; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "cylinder_length"; token.key_prefix = "--"; token.label = "Cylinder length"; token.help = "Used for the representation in Chimera. This will define the relative size of the cylinders representing the histogram. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "10000"; token.restore = "10000"; token.type = "int"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# Register the "sxlocres" command (Local Resolution): computes the local
# resolution of a map from two half-volumes. MPI-capable pipeline step in
# the sxc_localres category. Each SXcmd_token below describes one GUI
# argument/option (label, help text, defaults, and widget type).
sxcmd = SXcmd(); sxcmd.name = "sxlocres"; sxcmd.subname = ""; sxcmd.mode = ""; sxcmd.label = "Local Resolution"; sxcmd.short_info = "Compute local resolution of a map."; sxcmd.mpi_support = True; sxcmd.mpi_add_flag = True; sxcmd.category = "sxc_localres"; sxcmd.role = "sxr_pipe"; sxcmd.is_submittable = True
# Positional arguments (empty key_prefix): the two half-volumes, optional mask, and output.
token = SXcmd_token(); token.key_base = "firstvolume"; token.key_prefix = ""; token.label = "First half-volume"; token.help = ""; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "secondvolume"; token.key_prefix = ""; token.label = "Second half-volume"; token.help = ""; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "maskfile"; token.key_prefix = ""; token.label = "3D mask"; token.help = "Defines the region where the local filtering should be applied. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "outputfile"; token.key_prefix = ""; token.label = "Output volume"; token.help = "Each voxel contains the resolution for this area in absolute frequency units. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
# "--"-prefixed options controlling the FSC computation and output units.
token = SXcmd_token(); token.key_base = "fsc"; token.key_prefix = "--"; token.label = "FSC output file"; token.help = "Contains the overall FSC curve computed by rotational averaging of local resolution values. It is truncated to res_overall. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "no curve"; token.restore = "no curve"; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "wn"; token.key_prefix = "--"; token.label = "FSC window size [Pixels]"; token.help = "Defines the size of window where the local real-space FSC is computed. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "7"; token.restore = "7"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "step"; token.key_prefix = "--"; token.label = "Fourier shell step size [Pixels]"; token.help = ""; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1.0"; token.restore = "1.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "cutoff"; token.key_prefix = "--"; token.label = "Resolution cut-off threshold [1/Pixels]"; token.help = "Specify resolution cut-off threshold for the FSC with absolute frequency. By default it goes to the Nyquist frequencies. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "0.5"; token.restore = "0.5"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "radius"; token.key_prefix = "--"; token.label = "Mask radius [Pixels]"; token.help = "In case no mask is provided, a hard sphere of this radius will be used. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "-1"; token.restore = "-1"; token.type = "radius"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "res_overall"; token.key_prefix = "--"; token.label = "Overall resolution [1/Pixels]"; token.help = "Set an absolute frequency between 0 to 0.5 for calibration of the average local resolution. See Description section in the wiki page for details. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "-1.0"; token.restore = "-1.0"; token.type = "float"; sxcmd.token_list.append(token)
# Advanced options: optional Angstrom-unit output and its pixel size.
token = SXcmd_token(); token.key_base = "out_ang_res"; token.key_prefix = "--"; token.label = "Save Angstrom local resolution"; token.help = "Additionally creates a local resolution file in Angstroms. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "apix"; token.key_prefix = "--"; token.label = "Pixel size of half-volumes [A]"; token.help = "Effective only with --out_ang_res options. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "1.0"; token.restore = "1.0"; token.type = "apix"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# Register the "sxfilterlocal" command (3D Local Filter): applies a local
# low-pass filter to a map using the per-voxel resolution volume produced
# by sxlocres. MPI-capable pipeline step in the sxc_localres category.
sxcmd = SXcmd(); sxcmd.name = "sxfilterlocal"; sxcmd.subname = ""; sxcmd.mode = ""; sxcmd.label = "3D Local Filter"; sxcmd.short_info = "Locally filter maps according to the local resolution determined by sxlocres.py."; sxcmd.mpi_support = True; sxcmd.mpi_add_flag = True; sxcmd.category = "sxc_localres"; sxcmd.role = "sxr_pipe"; sxcmd.is_submittable = True
token = SXcmd_token(); token.key_base = "input_volume"; token.key_prefix = ""; token.label = "Input volume"; token.help = ""; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "locres_volume"; token.key_prefix = ""; token.label = "Local resolution file"; token.help = "Volume file containing the local resolution estimate produced by sxlocres. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "maskfile"; token.key_prefix = ""; token.label = "3D mask"; token.help = "Defines the region where the local filtering should be applied. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "outputfile"; token.key_prefix = ""; token.label = "Output volume"; token.help = ""; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "radius"; token.key_prefix = "--"; token.label = "Mask radius [Pixels]"; token.help = "In case no mask is provided, a hard sphere of this radius will be used. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "-1"; token.restore = "-1"; token.type = "radius"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "falloff"; token.key_prefix = "--"; token.label = "Low-pass filter fall-off [1/Pixels]"; token.help = "The program uses a tangent low-pass filter. Specify with absolute frequency. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "0.1"; token.restore = "0.1"; token.type = "float"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# Register the "e2display" utility (Display Data) under the sxc_localres
# category. Interactive viewer, so no MPI and not submittable to a queue.
# NOTE(review): the same command is registered again later under
# sxc_utilities — presumably the GUI lists utilities per category; confirm.
sxcmd = SXcmd(); sxcmd.name = "e2display"; sxcmd.subname = ""; sxcmd.mode = ""; sxcmd.label = "Display Data"; sxcmd.short_info = "Displays images, volumes, or 1D plots."; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_localres"; sxcmd.role = "sxr_util"; sxcmd.is_submittable = False
token = SXcmd_token(); token.key_base = "input_data_list"; token.key_prefix = ""; token.label = "Input files"; token.help = "List of input images, volumes, plots. Wild cards (e.g *) can be used to select a list of files. Not recommended when the list is too large. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "any_file_list"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "singleimage"; token.key_prefix = "--"; token.label = "Single image view"; token.help = "Displays a stack in a single image view: "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "fullrange"; token.key_prefix = "--"; token.label = "Use full range of pixel values"; token.help = "Instead of default auto-contrast, use full range of pixel values for the display of particles stacks and 2D images, "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "verbose"; token.key_prefix = "--"; token.label = "Verbose"; token.help = "Accepted values 0-9. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0"; token.restore = "0"; token.type = "int"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# Register "sxprocess --adaptive_mask" (Adaptive 3D Mask) under the
# sxc_localres category: builds a soft-edged 3D mask from a reference volume.
# The mode-selecting token below is required and locked, i.e. always passed.
sxcmd = SXcmd(); sxcmd.name = "sxprocess"; sxcmd.subname = ""; sxcmd.mode = "adaptive_mask"; sxcmd.label = "Adaptive 3D Mask"; sxcmd.short_info = "Create soft-edged 3D mask from reference volume. "; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_localres"; sxcmd.role = "sxr_util"; sxcmd.is_submittable = True
token = SXcmd_token(); token.key_base = "adaptive_mask"; token.key_prefix = "--"; token.label = "Create soft-edged 3D mask"; token.help = "Create soft-edged 3D mask from reference volume. "; token.group = "main"; token.is_required = True; token.is_locked = True; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
# NOTE(review): is_locked is None on the next two tokens, while other tokens
# use True/False — presumably None is treated as falsy by the consumer; confirm.
token = SXcmd_token(); token.key_base = "input_volume"; token.key_prefix = ""; token.label = "Input volume"; token.help = "Input reference volume"; token.group = "main"; token.is_required = True; token.is_locked = None; token.default = ""; token.restore = ""; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "output_mask3D"; token.key_prefix = ""; token.label = "Output mask"; token.help = "Output 3D mask"; token.group = "main"; token.is_required = True; token.is_locked = None; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "nsigma"; token.key_prefix = "--"; token.label = "Density standard deviations threshold"; token.help = "Defines the threshold used to find the main volume within the data. All voxels with density <= mean + nsigma standard deviations will be included into the main volume. This option will not be used if the option threshold is larger than -9999.0. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1.0"; token.restore = "1.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "threshold"; token.key_prefix = "--"; token.label = "Binarization threshold"; token.help = "Below this value the data is assumed to not belong to the main volume. With the value lower than the default, the option will be ignored and the mask will be set according to nsigma. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "-9999.0"; token.restore = "-9999.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "ndilation"; token.key_prefix = "--"; token.label = "Mask extension cycles"; token.help = "The initial mask will be extended this number of cycles. To keep the size of the main volume, set this to kernel_size/2 "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "3"; token.restore = "3"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "kernel_size"; token.key_prefix = "--"; token.label = "Gaussian kernel size [Pixels]"; token.help = "Size of the gaussian kernel used to smooth the binary mask. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "11"; token.restore = "11"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "gauss_standard_dev"; token.key_prefix = "--"; token.label = "Kernel standard deviation [Pixels]"; token.help = "Standard deviation used in the construction of the gaussian smoothing of the mask. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "9"; token.restore = "9"; token.type = "int"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# Register "sxprocess --angular_distribution" (Angular Distribution) under
# the sxc_localres category: writes a Chimera-viewable 3D representation of
# an angular distribution from an alignment-parameter file.
# Fix: user-facing text said "UCFS Chimera"; the program is "UCSF Chimera".
sxcmd = SXcmd(); sxcmd.name = "sxprocess"; sxcmd.subname = ""; sxcmd.mode = "angular_distribution"; sxcmd.label = "Angular Distribution"; sxcmd.short_info = "Create angular distribution file containing a 3D representation of the given angular distribution. It can be viewed with UCSF Chimera. "; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_localres"; sxcmd.role = "sxr_util"; sxcmd.is_submittable = True
# Mode-selecting token: required and locked, i.e. always passed to sxprocess.
token = SXcmd_token(); token.key_base = "angular_distribution"; token.key_prefix = "--"; token.label = "Create angular distribution file"; token.help = "Create angular distribution file containing a 3D representation of the given angular distribution. It can be viewed with UCSF Chimera. "; token.group = "main"; token.is_required = True; token.is_locked = True; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "inputfile"; token.key_prefix = ""; token.label = "Alignment Parameter file"; token.help = "Alignment Parameter file created by a previous 3D reconstruction step (e.g. sxmeridien.py)"; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "any_file"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "pixel_size"; token.key_prefix = "--"; token.label = "Pixel size [A]"; token.help = "Pixel size in A. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1.0"; token.restore = "1.0"; token.type = "apix"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "round_digit"; token.key_prefix = "--"; token.label = "Number precision"; token.help = "Decimal numbers will be rounded to this number of decimal points. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "5"; token.restore = "5"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "box_size"; token.key_prefix = "--"; token.label = "Box size [Pixels]"; token.help = "Box size in pixel used for calculating the center of the particle. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "500"; token.restore = "500"; token.type = "box"; sxcmd.token_list.append(token)
# Chimera rendering parameters for the histogram cylinders.
token = SXcmd_token(); token.key_base = "particle_radius"; token.key_prefix = "--"; token.label = "Particle radius [Pixels]"; token.help = "Used for the representation in Chimera. Defines where the cylinders representing the histogram must start. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "175"; token.restore = "175"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "cylinder_width"; token.key_prefix = "--"; token.label = "Cylinder width"; token.help = "Used for the representation in Chimera. This will define the width of the cylinders representing the histogram."; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "cylinder_length"; token.key_prefix = "--"; token.label = "Cylinder length"; token.help = "Used for the representation in Chimera. This will define the relative size of the cylinders representing the histogram. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "10000"; token.restore = "10000"; token.type = "int"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# Register the "e2display" utility (Display Data) under the sxc_utilities
# category. Interactive viewer, so no MPI and not submittable to a queue.
sxcmd = SXcmd(); sxcmd.name = "e2display"; sxcmd.subname = ""; sxcmd.mode = ""; sxcmd.label = "Display Data"; sxcmd.short_info = "Displays images, volumes, or 1D plots."; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_utilities"; sxcmd.role = "sxr_util"; sxcmd.is_submittable = False
token = SXcmd_token(); token.key_base = "input_data_list"; token.key_prefix = ""; token.label = "Input files"; token.help = "List of input images, volumes, plots. Wild cards (e.g *) can be used to select a list of files. Not recommended when the list is too large. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "any_file_list"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "singleimage"; token.key_prefix = "--"; token.label = "Single image view"; token.help = "Displays a stack in a single image view: "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "fullrange"; token.key_prefix = "--"; token.label = "Use full range of pixel values"; token.help = "Instead of default auto-contrast, use full range of pixel values for the display of particles stacks and 2D images, "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "verbose"; token.key_prefix = "--"; token.label = "Verbose"; token.help = "Accepted values 0-9. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0"; token.restore = "0"; token.type = "int"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# Register the "sxpdb2em" utility (PDB File Conversion): converts an atomic
# model (PDB) into a simulated EM density map. sxc_utilities category, no MPI.
sxcmd = SXcmd(); sxcmd.name = "sxpdb2em"; sxcmd.subname = ""; sxcmd.mode = ""; sxcmd.label = "PDB File Conversion"; sxcmd.short_info = "Converts an atomic model into a simulated density map"; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_utilities"; sxcmd.role = "sxr_util"; sxcmd.is_submittable = True
token = SXcmd_token(); token.key_base = "input_pdb"; token.key_prefix = ""; token.label = "Input PDB file"; token.help = "Starting atomic coordinates: "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "pdb"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "output_hdf"; token.key_prefix = ""; token.label = "Output map"; token.help = ""; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "apix"; token.key_prefix = "--"; token.label = "Pixel size of output map [A]"; token.help = ""; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1.0"; token.restore = "1.0"; token.type = "apix"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "box"; token.key_prefix = "--"; token.label = "Output box size [Voxels]"; token.help = "If not given, the program will find the minimum box size fitting the structure. Be aware that this will most likely result in a rectangular box. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "box"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "het"; token.key_prefix = "--"; token.label = "Include hetero atoms"; token.help = "Otherwise the HETATM entries in the PDB file are ignored. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "center"; token.key_prefix = "--"; token.label = "Center model at the origin"; token.help = ": Specifies whether the atomic model should be moved to the origin before generating density map. Available options are: c - Use the geometrical center of atoms; a - Use the center of mass (recommended); x,y,z - Vector to be subtracted for the coordinates. Default: no centering, in which case (0,0,0) in the PDB space will map to the center of the EM volume. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "n"; token.restore = "n"; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "O"; token.key_prefix = "--"; token.label = "Apply additional rotation"; token.help = "This can be used to modify the orientation of the atomic model. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "tr0"; token.key_prefix = "--"; token.label = "Rotational matrix file"; token.help = ": This file must contain the 3x4 transformation matrix to be applied to the PDB coordinates after centering. The translation vector (last column of the matrix) must be specified in Angstrom. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "parameters"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "quiet"; token.key_prefix = "--"; token.label = "Silent mode"; token.help = "Does not print any information to the monitor. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# Register "sxprocess --adaptive_mask" (Adaptive 3D Mask) under the
# sxc_utilities category: builds a soft-edged 3D mask from a reference volume.
# The mode-selecting token below is required and locked, i.e. always passed.
sxcmd = SXcmd(); sxcmd.name = "sxprocess"; sxcmd.subname = ""; sxcmd.mode = "adaptive_mask"; sxcmd.label = "Adaptive 3D Mask"; sxcmd.short_info = "Create soft-edged 3D mask from reference volume. "; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_utilities"; sxcmd.role = "sxr_util"; sxcmd.is_submittable = True
token = SXcmd_token(); token.key_base = "adaptive_mask"; token.key_prefix = "--"; token.label = "Create soft-edged 3D mask"; token.help = "Create soft-edged 3D mask from reference volume. "; token.group = "main"; token.is_required = True; token.is_locked = True; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
# NOTE(review): is_locked is None on the next two tokens, while other tokens
# use True/False — presumably None is treated as falsy by the consumer; confirm.
token = SXcmd_token(); token.key_base = "input_volume"; token.key_prefix = ""; token.label = "Input volume"; token.help = "Input reference volume"; token.group = "main"; token.is_required = True; token.is_locked = None; token.default = ""; token.restore = ""; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "output_mask3D"; token.key_prefix = ""; token.label = "Output mask"; token.help = "Output 3D mask"; token.group = "main"; token.is_required = True; token.is_locked = None; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "nsigma"; token.key_prefix = "--"; token.label = "Density standard deviations threshold"; token.help = "Defines the threshold used to find the main volume within the data. All voxels with density <= mean + nsigma standard deviations will be included into the main volume. This option will not be used if the option threshold is larger than -9999.0. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1.0"; token.restore = "1.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "threshold"; token.key_prefix = "--"; token.label = "Binarization threshold"; token.help = "Below this value the data is assumed to not belong to the main volume. With the value lower than the default, the option will be ignored and the mask will be set according to nsigma. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "-9999.0"; token.restore = "-9999.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "ndilation"; token.key_prefix = "--"; token.label = "Mask extension cycles"; token.help = "The initial mask will be extended this number of cycles. To keep the size of the main volume, set this to kernel_size/2 "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "3"; token.restore = "3"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "kernel_size"; token.key_prefix = "--"; token.label = "Gaussian kernel size [Pixels]"; token.help = "Size of the gaussian kernel used to smooth the binary mask. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "11"; token.restore = "11"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "gauss_standard_dev"; token.key_prefix = "--"; token.label = "Kernel standard deviation [Pixels]"; token.help = "Standard deviation used in the construction of the gaussian smoothing of the mask. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "9"; token.restore = "9"; token.type = "int"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# Register "sxprocess --binary_mask" (Binary 3D Mask) under the sxc_utilities
# category: thresholds a reference volume into a binary mask, with optional
# erosion/dilation cleanup cycles.
# Fix: user-facing help text said "After erosing"; corrected to "After eroding".
sxcmd = SXcmd(); sxcmd.name = "sxprocess"; sxcmd.subname = ""; sxcmd.mode = "binary_mask"; sxcmd.label = "Binary 3D Mask"; sxcmd.short_info = "Create binary 3D mask from reference volume. "; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_utilities"; sxcmd.role = "sxr_util"; sxcmd.is_submittable = True
# Mode-selecting token: required and locked, i.e. always passed to sxprocess.
token = SXcmd_token(); token.key_base = "binary_mask"; token.key_prefix = "--"; token.label = "Create binary 3D mask"; token.help = "Create binary 3D mask from reference volume. "; token.group = "main"; token.is_required = True; token.is_locked = True; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
# NOTE(review): is_locked is None on the next two tokens, while other tokens
# use True/False — presumably None is treated as falsy by the consumer; confirm.
token = SXcmd_token(); token.key_base = "input_volume"; token.key_prefix = ""; token.label = "Input volume"; token.help = "Input reference volume"; token.group = "main"; token.is_required = True; token.is_locked = None; token.default = ""; token.restore = ""; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "output_mask3D"; token.key_prefix = ""; token.label = "Output mask"; token.help = "Output 3D mask"; token.group = "main"; token.is_required = True; token.is_locked = None; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "bin_threshold"; token.key_prefix = "--"; token.label = "Binarization threshold"; token.help = "Below this value the data is assumed to not belong to the main volume. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "0.0"; token.restore = "0.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "ne"; token.key_prefix = "--"; token.label = "Erosion cycles"; token.help = "After initial binarization the volume is eroded to remove fragmented pieces of the volume. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "0"; token.restore = "0"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "nd"; token.key_prefix = "--"; token.label = "Dilation cycles"; token.help = "After eroding the binary volume is dilated back to smooth the surface and match the original size. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "0"; token.restore = "0"; token.type = "int"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# Register "sxprocess --changesize" (Change Size of Image or Volume):
# resamples an image/volume by a given ratio, adjusting pixel and window size.
# sxc_utilities category, no MPI.
sxcmd = SXcmd(); sxcmd.name = "sxprocess"; sxcmd.subname = ""; sxcmd.mode = "changesize"; sxcmd.label = "Change Size of Image or Volume"; sxcmd.short_info = "Change size of image or volume (resample, decimation or interpolation up). The process also changes the pixel size and window size accordingly. "; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_utilities"; sxcmd.role = "sxr_util"; sxcmd.is_submittable = True
# Mode-selecting token: required and locked, i.e. always passed to sxprocess.
token = SXcmd_token(); token.key_base = "changesize"; token.key_prefix = "--"; token.label = "Change size of image or volume"; token.help = "Change size of image or volume (resample, decimation or interpolation up). The process also changes the pixel size and window size accordingly. "; token.group = "main"; token.is_required = True; token.is_locked = True; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
# NOTE(review): is_locked is None on the next two tokens, while other tokens
# use True/False — presumably None is treated as falsy by the consumer; confirm.
token = SXcmd_token(); token.key_base = "input_stack"; token.key_prefix = ""; token.label = "Input image or volume"; token.help = "Input image or volume."; token.group = "main"; token.is_required = True; token.is_locked = None; token.default = ""; token.restore = ""; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "output_stack"; token.key_prefix = ""; token.label = "Output resized image or volume"; token.help = "Resized (decimated or interpolated up) image or volume."; token.group = "main"; token.is_required = True; token.is_locked = None; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "ratio"; token.key_prefix = "--"; token.label = "Ratio of new to old image size"; token.help = "if < 1, the pixel size will increase and image size decrease. if > 1, the other way round. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1.0"; token.restore = "1.0"; token.type = "float"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# Register "e2proc3d --clip" (Window Image or Volume): pads or clips a volume
# to specified dimensions. sxc_utilities category, no MPI.
sxcmd = SXcmd(); sxcmd.name = "e2proc3d"; sxcmd.subname = ""; sxcmd.mode = "clip"; sxcmd.label = "Window Image or Volume"; sxcmd.short_info = "Window (pad or clip) volume to the specific dimensions. Specify 1, 3 or 6 arguments; '[,,[,,,]]'. "; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_utilities"; sxcmd.role = "sxr_util"; sxcmd.is_submittable = True
# Unlike the sxprocess mode tokens, --clip is required but NOT locked: the
# user supplies the target dimensions as its string value.
token = SXcmd_token(); token.key_base = "clip"; token.key_prefix = "--"; token.label = "Window to specified size [Pixels]"; token.help = "Window (pad or clip) volume to the specific dimensions. Specify 1, 3 or 6 arguments; '[,,[,,,]]'. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "string"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "input_volume"; token.key_prefix = ""; token.label = "Input image or volume"; token.help = "Path to input image or volume file."; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "output_file"; token.key_prefix = ""; token.label = "Output windowed image or volume"; token.help = "Path to output windowed (clipped/padded) image or volume file."; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# Register "sxprocess --angular_distribution" (Angular Distribution) under
# the sxc_utilities category: writes a Chimera-viewable 3D representation of
# an angular distribution from an alignment-parameter file.
# Fix: user-facing text said "UCFS Chimera"; the program is "UCSF Chimera".
sxcmd = SXcmd(); sxcmd.name = "sxprocess"; sxcmd.subname = ""; sxcmd.mode = "angular_distribution"; sxcmd.label = "Angular Distribution"; sxcmd.short_info = "Create angular distribution file containing a 3D representation of the given angular distribution. It can be viewed with UCSF Chimera. "; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_utilities"; sxcmd.role = "sxr_util"; sxcmd.is_submittable = True
# Mode-selecting token: required and locked, i.e. always passed to sxprocess.
token = SXcmd_token(); token.key_base = "angular_distribution"; token.key_prefix = "--"; token.label = "Create angular distribution file"; token.help = "Create angular distribution file containing a 3D representation of the given angular distribution. It can be viewed with UCSF Chimera. "; token.group = "main"; token.is_required = True; token.is_locked = True; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "inputfile"; token.key_prefix = ""; token.label = "Alignment Parameter file"; token.help = "Alignment Parameter file created by a previous 3D reconstruction step (e.g. sxmeridien.py)"; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "any_file"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "pixel_size"; token.key_prefix = "--"; token.label = "Pixel size [A]"; token.help = "Pixel size in A. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1.0"; token.restore = "1.0"; token.type = "apix"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "round_digit"; token.key_prefix = "--"; token.label = "Number precision"; token.help = "Decimal numbers will be rounded to this number of decimal points. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "5"; token.restore = "5"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "box_size"; token.key_prefix = "--"; token.label = "Box size [Pixels]"; token.help = "Box size in pixel used for calculating the center of the particle. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "500"; token.restore = "500"; token.type = "box"; sxcmd.token_list.append(token)
# Chimera rendering parameters for the histogram cylinders.
token = SXcmd_token(); token.key_base = "particle_radius"; token.key_prefix = "--"; token.label = "Particle radius [Pixels]"; token.help = "Used for the representation in Chimera. Defines where the cylinders representing the histogram must start. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "175"; token.restore = "175"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "cylinder_width"; token.key_prefix = "--"; token.label = "Cylinder width"; token.help = "Used for the representation in Chimera. This will define the width of the cylinders representing the histogram."; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "cylinder_length"; token.key_prefix = "--"; token.label = "Cylinder length"; token.help = "Used for the representation in Chimera. This will define the relative size of the cylinders representing the histogram. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "10000"; token.restore = "10000"; token.type = "int"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
sxcmd = SXcmd(); sxcmd.name = "sxunblur"; sxcmd.subname = ""; sxcmd.mode = ""; sxcmd.label = "Micrograph Movie Alignment"; sxcmd.short_info = "Align frames of micrograph movies with Unblur."; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_utilities"; sxcmd.role = "sxr_util"; sxcmd.is_submittable = True
token = SXcmd_token(); token.key_base = "unblur_path"; token.key_prefix = ""; token.label = "unblur executable path"; token.help = "Specify the file path of unblur executable. (This argument is specific to SPHIRE, and not directly used by unblur and summovie executables.) "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "exe"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "input_micrograph_pattern"; token.key_prefix = ""; token.label = "Input micrograph path pattern"; token.help = "Specify path pattern of input micrographs with a wild card (*). Use the wild card to indicate the place of variable part of the file names (e.g. serial number, time stamp, and etc). The path pattern must be enclosed by single quotes (') or double quotes ('). (Note: sxgui.py automatically adds single quotes (')). bdb files can not be selected as input micrographs. (This argument is specific to SPHIRE, and not directly used by unblur and summovie executables.) "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "any_image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "output_directory"; token.key_prefix = ""; token.label = "Output directory"; token.help = "The results will be written here. This directory will be created automatically and it must not exist previously. (This argument is specific to SPHIRE, and not directly used by unblur and summovie executables.) "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "summovie_path"; token.key_prefix = "--"; token.label = "summovie executable path"; token.help = "Specify the file path of summovie executable. (This option is specific to SPHIRE, and not directly used by unblur and summovie executables.) "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "exe"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "selection_list"; token.key_prefix = "--"; token.label = "Micrograph selection file"; token.help = "Specify a name of micrograph selection list text file. The file extension must be '.txt'. If this is not provided, all files matched with the micrograph name pattern will be processed. (This option is specific to SPHIRE, and not directly used by unblur and summovie executables.) "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "any_micrograph"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "nr_frames"; token.key_prefix = "--"; token.label = "Number of movie frames"; token.help = "The number of movie frames in each input micrograph. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "3"; token.restore = "3"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "pixel_size"; token.key_prefix = "--"; token.label = "Pixel size [A]"; token.help = "The pixel size of input micrographs. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "apix"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "voltage"; token.key_prefix = "--"; token.label = "Microscope voltage [kV]"; token.help = "The acceleration voltage of microscope used for imaging. Will be ignored when skip_dose_filter option is used. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "300.0"; token.restore = "300.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "exposure_per_frame"; token.key_prefix = "--"; token.label = "Per frame exposure [e/A^2]"; token.help = "The electron dose per frame in e/A^2. Will be ignored when skip_dose_filter option is used. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "2.0"; token.restore = "2.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "pre_exposure"; token.key_prefix = "--"; token.label = "Pre-exposure [e/A^2]"; token.help = "The electron does in e/A^2 used for exposure prior to imaging. Will be ignored when skip_dose_filter option is used. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "0.0"; token.restore = "0.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "nr_threads"; token.key_prefix = "--"; token.label = "Number of threads"; token.help = "The number of threads unblur can use. The higher the faster, but it requires larger memory. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "save_frames"; token.key_prefix = "--"; token.label = "Save aligned movie frames"; token.help = "Save aligned movie frames. This option slows down the process. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "skip_dose_filter"; token.key_prefix = "--"; token.label = "Apply dose filter"; token.help = "Indicate if dose filter should be applied or not. With this option, voltage, exposure per frame, and pre exposure will be ignored. By default, apply dose filter. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "expert_mode"; token.key_prefix = "--"; token.label = "Use expert mode"; token.help = "Requires initial shift, shift radius, b-factor, fourier_vertical, fourier_horizontal, shift threshold, iterations, restore noise, and verbosity options. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "shift_initial"; token.key_prefix = "--"; token.label = "Minimum shift for initial search [A]"; token.help = "Effective only when expert_mode option is used. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "2.0"; token.restore = "2.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "shift_radius"; token.key_prefix = "--"; token.label = "Outer radius shift limit [A]"; token.help = "Effective only when expert_mode option is used. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "200.0"; token.restore = "200.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "b_factor"; token.key_prefix = "--"; token.label = "Apply B-factor to images [A^2]"; token.help = "Effective only when expert_mode option is used. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "1500.0"; token.restore = "1500.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "fourier_vertical"; token.key_prefix = "--"; token.label = "Vertical Fourier central mask size"; token.help = "The half-width of central vertical line of Fourier mask. Effective only when expert_mode option is used. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "fourier_horizontal"; token.key_prefix = "--"; token.label = "Horizontal Fourier central mask size"; token.help = "The half-width of central horizontal line of Fourier mask. Effective only when expert_mode option is used. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "shift_threshold"; token.key_prefix = "--"; token.label = "Termination shift threshold"; token.help = "Effective only when expert_mode option is used. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0.1"; token.restore = "0.1"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "iterations"; token.key_prefix = "--"; token.label = "Maximum iterations"; token.help = "Effective only when expert_mode option is used. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "10"; token.restore = "10"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "dont_restore_noise"; token.key_prefix = "--"; token.label = "Restore noise power"; token.help = "Indicate if noise power should be restored or not. By default, restore noise power. Effective only when expert_mode option is used. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "verbose"; token.key_prefix = "--"; token.label = "Verbose"; token.help = "Effective only when expert_mode option is used. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
sxcmd = SXcmd(); sxcmd.name = "sxsummovie"; sxcmd.subname = ""; sxcmd.mode = ""; sxcmd.label = "Micrograph Movie Summation"; sxcmd.short_info = "Sum micrograph movies with SumMovie."; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_utilities"; sxcmd.role = "sxr_util"; sxcmd.is_submittable = True
token = SXcmd_token(); token.key_base = "summovie_path"; token.key_prefix = ""; token.label = "summovie executable path"; token.help = "Specify the file path of summovie executable. (This argument is specific to SPHIRE, and not directly used by summovie executable.) "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "exe"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "input_micrograph_pattern"; token.key_prefix = ""; token.label = "Input micrograph path pattern"; token.help = "Specify path pattern of input micrographs with a wild card (*). Use the wild card to indicate the place of variable part of the file names (e.g. serial number, time stamp, and etc). The path pattern must be enclosed by single quotes (') or double quotes ('). (Note: sxgui.py automatically adds single quotes (')). bdb files can not be selected as input micrographs. (This argument is specific to SPHIRE, and not directly used by summovie executable.) "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "any_image"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "input_shift_pattern"; token.key_prefix = ""; token.label = "Input shift path pattern"; token.help = "Specify path pattern of input shift files with a wild card (*). Use the wild card to indicate the place of variable part of the file names (e.g. serial number, time stamp, and etc). The path pattern must be enclosed by single quotes (') or double quotes ('). (Note: sxgui.py automatically adds single quotes (')). The substring at the variable part must be same between the associated pair of input micrograph and shift file. (This argument is specific to SPHIRE, and not directly used by summovie executable.) "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "parameters"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "output_directory"; token.key_prefix = ""; token.label = "Output directory"; token.help = "The results will be written here. This directory will be created automatically and it must not exist previously. (This argument is specific to SPHIRE, and not directly used by summovie executable.) "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "selection_list"; token.key_prefix = "--"; token.label = "Micrograph selection file"; token.help = "Specify a name of micrograph selection list text file. The file extension must be '.txt'. If this is not provided, all files matched with the micrograph name pattern will be processed. (This option is specific to SPHIRE, and not directly used by summovie executable.) "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "any_micrograph"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "nr_frames"; token.key_prefix = "--"; token.label = "Number of movie frames"; token.help = "The number of movie frames in each input micrograph. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "3"; token.restore = "3"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "first"; token.key_prefix = "--"; token.label = "First movie frame"; token.help = "First movie frame for summing. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "last"; token.key_prefix = "--"; token.label = "Last movie frame"; token.help = "Last movie frame for summing. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "-1"; token.restore = "-1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "pixel_size"; token.key_prefix = "--"; token.label = "Pixel size [A]"; token.help = "The pixel size of input micrographs. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "apix"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "nr_threads"; token.key_prefix = "--"; token.label = "Number of threads"; token.help = "The number of threads summovie can use. The higher the faster, but it requires larger memory. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "1"; token.restore = "1"; token.type = "int"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "apply_dose_filter"; token.key_prefix = "--"; token.label = "Apply dose filter"; token.help = "Requires voltage, exposure per frame, and pre exposure options. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = False; token.restore = False; token.type = "bool"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "voltage"; token.key_prefix = "--"; token.label = "Microscope voltage [kV]"; token.help = "The acceleration voltage of microscope used for imaging. Effective only when apply_dose_filter option is used. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "300.0"; token.restore = "300.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "exposure_per_frame"; token.key_prefix = "--"; token.label = "Per frame exposure [e/A^2]"; token.help = "The electron dose per frame in e/A^2. Effective only when apply_dose_filter option is used. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "2.0"; token.restore = "2.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "pre_exposure"; token.key_prefix = "--"; token.label = "Pre-exposure [e/A^2]"; token.help = "The electron does in e/A^2 used for exposure prior to imaging. Effective only when apply_dose_filter option is used. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = "0.0"; token.restore = "0.0"; token.type = "float"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "dont_restore_noise"; token.key_prefix = "--"; token.label = "Restore noise power"; token.help = "Indicate if noise power should be restored or not. By default, restore noise power. "; token.group = "advanced"; token.is_required = False; token.is_locked = False; token.default = True; token.restore = True; token.type = "bool"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
sxcmd = SXcmd(); sxcmd.name = "e2bdb"; sxcmd.subname = ""; sxcmd.mode = "makevstack"; sxcmd.label = "Create Virtual Stack"; sxcmd.short_info = "Make a 'virtual' BDB image stack with the specified name from one or more other stacks. "; sxcmd.mpi_support = False; sxcmd.mpi_add_flag = False; sxcmd.category = "sxc_utilities"; sxcmd.role = "sxr_util"; sxcmd.is_submittable = True
token = SXcmd_token(); token.key_base = "makevstack"; token.key_prefix = "--"; token.label = "Output virtual image stack"; token.help = "Make a 'virtual' BDB image stack with the specified name from one or more other stacks. "; token.group = "main"; token.is_required = True; token.is_locked = False; token.default = ""; token.restore = ""; token.type = "output"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "input_bdb_stack_file"; token.key_prefix = ""; token.label = "Input BDB image stack"; token.help = "Specify path to input BDB stack file. "; token.group = "main"; token.is_required = True; token.is_locked = None; token.default = ""; token.restore = ""; token.type = "bdb"; sxcmd.token_list.append(token)
token = SXcmd_token(); token.key_base = "list"; token.key_prefix = "--"; token.label = "File containing selection list of images"; token.help = "Input ASCII file containing a list of selected image names to creates a new virtual BDB image stack from an existed virtual stack. "; token.group = "main"; token.is_required = False; token.is_locked = False; token.default = "none"; token.restore = "none"; token.type = "parameters"; sxcmd.token_list.append(token)
sxcmd_list.append(sxcmd)
# @@@@@ END_INSERTION @@@@@
# Create dictionaries from the constructed lists
for sxcmd_category in sxcmd_category_list:
sxcmd_category_dict[sxcmd_category.name] = sxcmd_category
# Create command token dictionary for each SXcmd instance
# Then, register SXcmd instance to an associated SXcmd_category
for sxcmd in sxcmd_list:
for sxcmd_token in sxcmd.token_list:
# Handle very special cases
if sxcmd_token.type == "function":
n_widgets = 2 # function type has two line edit boxes
sxcmd_token.label = [sxcmd_token.label, "Python script for user function"]
sxcmd_token.help = [sxcmd_token.help, "Please leave it blank if file is not external to sphire"]
sxcmd_token.default = [sxcmd_token.default, "none"]
if not sxcmd_token.is_locked:
sxcmd_token.restore = sxcmd_token.default
# else: Do nothing for the other types
# Register this to command token dictionary
sxcmd.token_dict[sxcmd_token.key_base] = sxcmd_token
# Register this to command to command category dictionary
assert sxcmd_category_dict.has_key(sxcmd.category), "sxcmd.category %s" % (sxcmd.category)
sxcmd_category_dict[sxcmd.category].cmd_list.append(sxcmd)
# Store the constructed lists and dictionary as a class data member
self.sxcmd_category_list = sxcmd_category_list
def update_qsub_enable_states(self):
    """Refresh the qsub (queue-submission) enable state of every command tab.

    Walks all registered command categories and asks each command's main
    tab widget to re-evaluate whether its queue-submission controls
    should be enabled.
    """
    for category in self.sxcmd_category_list:
        for command in category.cmd_list:
            command.widget.sxcmd_tab_main.set_qsub_enable_state()
def handle_sxmenu_item_btn_event(self, sxmenu_item):
assert(isinstance(sxmenu_item, SXmenu_item) == True) # Assuming the sxmenu_item is an instance of class SXmenu_item
modifiers = QApplication.keyboardModifiers()
if modifiers == Qt.ShiftModifier:
sxmenu_item_wiki_url = SXLookFeelConst.generate_sxmenu_item_wiki_url(sxmenu_item)
print "Opening Wiki Page ..."
print sxmenu_item_wiki_url
os.system("python -m webbrowser %s" % (sxmenu_item_wiki_url))
return
if self.cur_sxmenu_item == sxmenu_item: return
if self.cur_sxmenu_item != None:
self.cur_sxmenu_item.btn.setStyleSheet(self.cur_sxmenu_item.btn.customButtonStyle)
self.cur_sxmenu_item = sxmenu_item
if self.cur_sxmenu_item != None:
self.cur_sxmenu_item.btn.setStyleSheet(self.cur_sxmenu_item.btn.customButtonStyleClicked)
self.sxmenu_item_widget_stacked_layout.setCurrentWidget(self.cur_sxmenu_item.widget)
def closeEvent(self, event):
    """Shut down the GUI cleanly when the main window is closed.

    The close event itself is ignored; instead, every command-category
    widget is asked to terminate the child applications it launched, and
    the Qt application is quit explicitly.
    """
    event.ignore() # event.accept()
    # Quit child applications of all sxcmd widgets before tearing down Qt.
    for category in self.sxcmd_category_list:
        category.widget.quit_all_child_applications()
    print("bye bye")
    QtCore.QCoreApplication.instance().quit()
# def changeEvent(self, event):
# print(self.frameGeometry())
# ========================================================================================
def main():
from optparse import OptionParser
progname = os.path.basename(sys.argv[0])
usage = progname + """
The main SPHIRE GUI application. It is designed as the command generator for the SPHIRE single particle analysis pipeline.
"""
parser = OptionParser(usage, version=SPARXVERSION)
# No options!!! Does not need to call parser.add_option()
(options, args) = parser.parse_args(sys.argv[1:])
if len(args) > 1:
print "see usage " + usage
sys.exit()
sxapp = QApplication(sys.argv)
# The valid keys can be retrieved using the keys() function.
# Typically they include "windows", "motif", "cde", "plastique" and "cleanlooks".
# Depending on the platform, "windowsxp", "windowsvista" and "macintosh" may be available. Note that keys are case insensitive.
# sxapp.setStyle("macintosh")
sxapp.setStyle("cleanlooks")
# sxapp.setStyle("plastique")
# print "MRK_DEBUG:"
# print "MRK_DEBUG: sxapp.style().metaObject().className() == %s" % (str(sxapp.style().metaObject().className()))
# for key in QStyleFactory.keys():
# print "MRK_DEBUG: str(key) == %s" % str(key)
# print "MRK_DEBUG: QStyleFactory.create(key) = %s" % (str(QStyleFactory.create(key).metaObject().className()))
# if sxapp.style().metaObject().className() == QStyleFactory.create(key).metaObject().className():
# print "MRK_DEBUG: !!!USING THE STYLE: %s!!!" % str(key)
# print "MRK_DEBUG:"
sxapp.setWindowIcon(QIcon(get_image_directory()+"sxgui_icon_sphire.png"))
sxapp_font = sxapp.font()
sxapp_font_info = QFontInfo(sxapp.font())
# new_point_size = sxapp_font_info.pointSize() + 1
new_point_size = sxapp_font_info.pointSize()
# # MRK_DEBUG: Check the default system font
# print "MRK_DEBUG: sxapp_font_info.style() = ", sxapp_font_info.style()
# print "MRK_DEBUG: sxapp_font_info.styleHint() = ", sxapp_font_info.styleHint()
# print "MRK_DEBUG: sxapp_font_info.styleName() = ", sxapp_font_info.styleName()
# print "MRK_DEBUG: sxapp_font_info.family() = ", sxapp_font_info.family()
# print "MRK_DEBUG: sxapp_font_info.fixedPitch() = ", sxapp_font_info.fixedPitch()
# print "MRK_DEBUG: sxapp_font_info.pixelSize() = ", sxapp_font_info.pixelSize()
# print "MRK_DEBUG: sxapp_font_info.pointSize() = ", sxapp_font_info.pointSize()
# print "MRK_DEBUG: sxapp_font_info.pointSizeF() = ", sxapp_font_info.pointSizeF()
# print "MRK_DEBUG: sxapp_font_info.bold () = ", sxapp_font_info.bold()
# print "MRK_DEBUG: sxapp_font_info.italic() = ", sxapp_font_info.italic()
#
# NOTE: 2019/02/19 Toshio Moriya
# The following method of changing font size works with Linux.
# However, it does not work Mac OSX. The text of widget classes below won't change,
# still showing the default font size:
# QPushButton, QLable, Window Title, and QToolTip
#
# sxapp_font.setPointSize(new_point_size) # and setPointSizeF() are device independent, while setPixelSize() is device dependent
# sxapp.setFont(sxapp_font)
# sxapp.setStyleSheet("QPushButton {font-size:18pt;}"); # NOTE: 2016/02/19 Toshio Moriya: Doesn't work
# sxapp.setStyleSheet("QLabel {font-size:18pt;}"); # NOTE: 2016/02/19 Toshio Moriya: Doesn't work
# sxapp.setStyleSheet("QToolTip {font-size:14pt; color:white; padding:2px; border-width:2px; border-style:solid; border-radius:20px; background-color: black; border: 1px solid white;}");
sxapp.setStyleSheet("QToolTip {font-size:%dpt;}" % (new_point_size));
# Initialise a singleton class for look & feel constants
SXLookFeelConst.initialise(sxapp)
# Define the main window (class SXMainWindow)
sxmain_window = SXMainWindow()
sxmain_window.setWindowTitle("SPHIRE-GUI Main (Beta Version)")
sxmain_window.setMinimumWidth(SXLookFeelConst.sxmain_window_width)
sxmain_window.setMinimumHeight(SXLookFeelConst.sxmain_window_height)
sxmain_window.resize(SXLookFeelConst.sxmain_window_width, SXLookFeelConst.sxmain_window_height)
sxmain_window.move(QPoint(SXLookFeelConst.sxmain_window_left, SXLookFeelConst.sxmain_window_top));
# Show main window
sxmain_window.show()
sxmain_window.raise_()
# Update qsub enable state of all sx command category widgets after window is displayed and raised
sxmain_window.update_qsub_enable_states()
# Start event handling loop
sxapp.exec_()
# ========================================================================================
# Run the GUI only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()
# ========================================================================================
# END OF SCRIPT
# ========================================================================================
sphire_beta20170901_patch20170906/src/eman2/sparx/bin/sxsort3d_new.py 0000775 0030616 0076400 00001134211 13153776513 025460 0 ustar stabrin Domain Users #!/usr/bin/env python
#
#
# 08/26/2016
# New version of sort3D.
#
from __future__ import print_function
import os
import sys
import types
import global_def
from global_def import *
from optparse import OptionParser
from sparx import *
from EMAN2 import *
from numpy import array
from logger import Logger, BaseLogger_Files
from mpi import *
from math import *
from random import *
import shutil
import os
import sys
import subprocess
import time
import string
import json
from sys import exit
from time import localtime, strftime, sleep
# NOTE(review): at module scope this "global" statement is a no-op; it only
# documents that Tracker and Blockdata are intended as module-wide state.
global Tracker, Blockdata
# ------------------------------------------------------------------------------------
# Module-level MPI bootstrap: every rank executes this when the script starts.
mpi_init(0, [])
nproc = mpi_comm_size(MPI_COMM_WORLD)  # total number of MPI processes
myid = mpi_comm_rank(MPI_COMM_WORLD)   # global rank of this process
Blockdata = {}
# MPI stuff
Blockdata["nproc"] = nproc
Blockdata["myid"] = myid
Blockdata["main_node"] = 0  # global rank 0 acts as the master
# Split the world communicator into per-node, shared-memory communicators.
Blockdata["shared_comm"] = mpi_comm_split_type(MPI_COMM_WORLD, MPI_COMM_TYPE_SHARED, 0, MPI_INFO_NULL)
Blockdata["myid_on_node"] = mpi_comm_rank(Blockdata["shared_comm"])
Blockdata["no_of_processes_per_group"] = mpi_comm_size(Blockdata["shared_comm"])
# Communicator separating the per-node masters from all other ranks
# (the split color is the boolean "am I rank 0 on my node?").
masters_from_groups_vs_everything_else_comm = mpi_comm_split(MPI_COMM_WORLD, Blockdata["main_node"] == Blockdata["myid_on_node"], Blockdata["myid_on_node"])
Blockdata["color"], Blockdata["no_of_groups"], balanced_processor_load_on_nodes = get_colors_and_subsets(Blockdata["main_node"], MPI_COMM_WORLD, Blockdata["myid"], \
Blockdata["shared_comm"], Blockdata["myid_on_node"], masters_from_groups_vs_everything_else_comm)
# We need two nodes for processing of volumes: take the last two node groups,
# or node 0 twice when only a single node group is available.
if(Blockdata["no_of_groups"] > 1):
    Blockdata["node_volume"] = [Blockdata["no_of_groups"]-2, Blockdata["no_of_groups"]-1]
    #Blockdata["nodes"] = [Blockdata["no_of_groups"]-2, Blockdata["no_of_groups"]-1] # For 3D stuff take last two nodes
else:
    Blockdata["node_volume"] = [0, 0]
# We need two CPUs for processing of volumes, they are taken to be main CPUs on each volume
# We have to send the two myids to all nodes so we can identify main nodes on two selected groups.
if(Blockdata["no_of_groups"] > 1): Blockdata["main_shared_nodes"] = [Blockdata["node_volume"][0]*Blockdata["no_of_processes_per_group"],Blockdata["node_volume"][1]*Blockdata["no_of_processes_per_group"]]
else: Blockdata["main_shared_nodes"] = [0, 1]
Blockdata["nproc_previous"] = 0
# End of Blockdata: sorting requires at least three nodes, and the used number of nodes be integer times of three
global_def.BATCH = True
global_def.MPI = True
global _proc_status, _scale, is_unix_cluster
try:
    # Probe for a Linux-style /proc filesystem, used later for memory bookkeeping.
    _proc_status = '/proc/%d/status' % os.getpid()
    _scale = {'kB': 1024.0, 'mB': 1024.0*1024.0,'KB': 1024.0, 'MB': 1024.0*1024.0}
    is_unix_cluster = True
except:
    if Blockdata["myid"]==Blockdata["main_node"]:print("Not a unix machine")
    is_unix_cluster = False
def create_subgroup():
    """Create an MPI sub-communicator from the first ``ncpuspernode`` ranks of every node.

    Populates Blockdata["subgroup_comm"], Blockdata["subgroup_size"] and
    Blockdata["subgroup_myid"] (the latter two stay -1 on ranks outside
    the subgroup), plus Blockdata["nodes"], the subgroup root ranks on the
    two "node_volume" node groups that compute backprojection.
    """
    # Each rank nominates itself if it is among the first ncpuspernode CPUs on its node.
    if Blockdata["myid_on_node"] < Blockdata["ncpuspernode"]:
        selected_ids = [Blockdata["myid"]]
    else:
        selected_ids = []
    # Collect the nominations on the main node, then share the full list with all ranks.
    selected_ids = wrap_mpi_gatherv(selected_ids, Blockdata["main_node"], MPI_COMM_WORLD)
    selected_ids = wrap_mpi_bcast(selected_ids, Blockdata["main_node"], MPI_COMM_WORLD)
    # Build the subgroup communicator from the selected global ranks.
    world_group = mpi_comm_group(MPI_COMM_WORLD)
    sub_group = mpi_group_incl(world_group, len(selected_ids), selected_ids)
    Blockdata["subgroup_comm"] = mpi_comm_create(MPI_COMM_WORLD, sub_group)
    mpi_barrier(MPI_COMM_WORLD)
    Blockdata["subgroup_size"] = -1
    Blockdata["subgroup_myid"] = -1
    if MPI_COMM_NULL != Blockdata["subgroup_comm"]:
        # Only members of the new communicator may query it.
        Blockdata["subgroup_size"] = mpi_comm_size(Blockdata["subgroup_comm"])
        Blockdata["subgroup_myid"] = mpi_comm_rank(Blockdata["subgroup_comm"])
    # "nodes" are the zero ranks of the subgroups on the two "node_volume"
    # node groups that compute backprojection.
    Blockdata["nodes"] = [Blockdata["node_volume"][0] * Blockdata["ncpuspernode"],
                          Blockdata["node_volume"][1] * Blockdata["ncpuspernode"]]
    mpi_barrier(MPI_COMM_WORLD)
    return
def check_mpi_settings(log):
    """Sanity-check the MPI/memory configuration against the data-set size.

    Estimates the sorting window size from the refinement FSC (0.143 cutoff of
    the group-size-rescaled curve), then verifies that the memory available per
    node can hold the raw data, the downsized sorting data and the per-group
    volumes.  Aborts via ERROR() when the settings are insufficient.
    Collective: must be called on all ranks.

    log -- Logger instance; messages are added on the main node only.
    """
    global Tracker, Blockdata
    from utilities import wrap_mpi_bcast, read_text_file, bcast_number_to_all
    import os
    line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
    current_mpi_settings_is_bad = 0
    if(Blockdata["myid"] == Blockdata["main_node"]):
        # Rescale the global refinement FSC to the expected group size q and
        # locate the 0.143 cutoff; twice that frequency gives the window size.
        fsc_refinement = read_text_file(os.path.join(Tracker["constants"]["masterdir"], "fsc_global.txt"))
        q = float(Tracker["constants"]["img_per_grp"])/float(Tracker["constants"]["total_stack"])
        for ifreq in xrange(len(fsc_refinement)): fsc_refinement[ifreq] = fsc_refinement[ifreq]*q/(1.-fsc_refinement[ifreq]*(1.-q))
        res = 0.0
        for ifreq in xrange(len(fsc_refinement)):
            if fsc_refinement[ifreq]<0.143: break
        res = float(ifreq)/2./float(len(fsc_refinement))
        nxinit = int(2.*res*Tracker["constants"]["nnxo"])
        del fsc_refinement
    else: nxinit = 0
    nxinit = bcast_number_to_all(nxinit, Blockdata["main_node"], MPI_COMM_WORLD)
    sys_required_mem = 1.0*Blockdata["no_of_processes_per_group"]  # GB reserved for the system, 1 GB per process
    if( Blockdata["myid"] == Blockdata["main_node"]):
        msg = "------->>>>>>>Check memory and mpi settings<<<<-------------"
        log.add(msg)
        print(line, msg)
        msg ="cpus number: %5d node number: %5d cpu number per group: %5d"%(Blockdata["nproc"], Blockdata["no_of_groups"], Blockdata["no_of_processes_per_group"])
        log.add(msg)
        print(line, msg)
    try:
        # All sizes in GB; raw images are 4-byte floats, volumes 8-byte.
        image_org_size = Tracker["constants"]["nnxo"]
        image_in_core_size = nxinit
        ratio = float(nxinit)/float(image_org_size)
        raw_data_size = float(Tracker["constants"]["total_stack"]*image_org_size*image_org_size)*4.0/1.e9
        raw_data_size_per_node = float(Tracker["constants"]["total_stack"]*image_org_size*image_org_size)*4.0/1.e9/Blockdata["no_of_groups"]
        sorting_data_size_per_node = raw_data_size_per_node + 2.*raw_data_size_per_node*ratio**2
        volume_size_per_node = (4.*image_in_core_size**3*8.)*Blockdata["no_of_processes_per_group"]/1.e9
    except Exception: current_mpi_settings_is_bad = 1  # was a bare except; any failure means the constants are missing
    if current_mpi_settings_is_bad == 1: ERROR("initial info is not provided", "check_mpi_settings", 1, Blockdata["myid"])
    try:
        mem_bytes = os.sysconf('SC_PAGE_SIZE')*os.sysconf('SC_PHYS_PAGES')# e.g. 4015976448
        mem_gib = mem_bytes/(1024.**3) # e.g. 3.74
        if( Blockdata["myid"] == Blockdata["main_node"]):print(line, "system mem info: %5.1f G"%mem_gib)
    except (AttributeError, ValueError, OSError):
        # os.sysconf is missing or lacks these keys on non-unix systems
        mem_gib = None
        if( Blockdata["myid"] == Blockdata["main_node"]):print(line, "It is not an unix machine!")
    if Tracker["constants"]["memory_per_node"] == -1.:
        if mem_gib: total_memory = mem_gib
        else:
            total_memory = Blockdata["no_of_processes_per_group"]*2.0 # assume each CPU has 2.0 G
            if( Blockdata["myid"] == Blockdata["main_node"]):
                msg ="memory per node is not provided, sort3d assumes 2G per node"
                log.add(msg)
                print(line, msg)
        Tracker["constants"]["memory_per_node"] = total_memory
    else:
        msg ="memory per node: %f"%Tracker["constants"]["memory_per_node"]
        total_memory = Tracker["constants"]["memory_per_node"]
        if( Blockdata["myid"] == Blockdata["main_node"]):
            log.add(msg)
            print(line, msg)
    if( Blockdata["myid"] == Blockdata["main_node"]):
        msg = "total number of particles: %d number of particles per group: %d"%(Tracker["constants"]["total_stack"], Tracker["constants"]["img_per_grp"])
        log.add(msg)
        print(line, msg)
    if(Blockdata["myid"] == Blockdata["main_node"]):
        msg = "the available memory: %5.1f G"%total_memory
        log.add(msg)
        print(line, msg)
        msg = "total raw data: %5.1f G raw data per node: %5.1f G"%(raw_data_size, raw_data_size_per_node)
        log.add(msg)
        print(line, msg)
    # Require raw data + sorting data + volumes + 5 GB headroom to fit per node.
    if (total_memory - sys_required_mem - raw_data_size_per_node - volume_size_per_node - sorting_data_size_per_node - 5.0) <0.0:
        current_mpi_settings_is_bad = 1
        new_nproc = raw_data_size*(2.*ratio**2+1.)*Blockdata["no_of_processes_per_group"]/(total_memory - 5. - sys_required_mem - volume_size_per_node)
        new_nproc = int(new_nproc)
        if( Blockdata["myid"] == Blockdata["main_node"]):
            msg ="Suggestion: use number of processes %d"%int(new_nproc)
            print(line, msg)
            log.add(msg)
        ERROR("Insufficient memory", "check_mpi_settings", 1, Blockdata["myid"])  # message typo fixed ("In sufficient")
    images_per_cpu = float(Tracker["constants"]["total_stack"])/float(Blockdata["nproc"])
    images_per_cpu_for_unaccounted_data = Tracker["constants"]["img_per_grp"]*1.5/float(Blockdata["nproc"])
    if( Blockdata["myid"] == Blockdata["main_node"]):
        msg="current images per cpu: %d "%int(images_per_cpu)
        log.add(msg)
        print(line, msg)
    if images_per_cpu < 5.0: ERROR("image per cpu less than 5", "check_mpi_settings", 1, Blockdata["myid"])
    return
def get_sorting_image_size(original_data, partids, sparamstructure, snorm_per_particle, log):
    """Determine the window size (nxinit) to use for sorting.

    Runs one round of per-group reconstructions at the refinement window size,
    reads back the per-chunk FSC driver files, rescales each half-set FSC to a
    full-set FSC, and returns twice the smallest 0.143 cutoff frequency.
    Also writes the group-zero average FSC to fsc_image_size.txt.
    Collective: must be called on all ranks.

    original_data       -- local particle images (as read for sorting)
    partids             -- text file with particle ids (1 or 2 columns)
    sparamstructure     -- smearing parameter structure (or None)
    snorm_per_particle  -- per-particle norms
    log                 -- Logger instance (main node only)
    """
    global Tracker, Blockdata
    from utilities import wrap_mpi_bcast, read_text_file, write_text_file
    from applications import MPI_start_end
    iter_id = 0  # iteration label used in the fsc driver file names (was 'iter', shadowing the builtin)
    if(Blockdata["myid"] == Blockdata["main_node"]):
        msg = "start reconstruction with refinement window_size %d"%Tracker["nxinit_refinement"]
        print(msg)
        log.add(msg)
        lpartids = read_text_file(partids, -1)
        if len(lpartids) == 1:
            # No group column present: start from a random assignment.
            # Bug fix: 'number_of_groups' was an undefined local name here.
            iter_assignment = []
            for im in xrange(len(lpartids[0])):
                iter_assignment.append(randint(0, Tracker["number_of_groups"]-1))# simple version
        else:
            iter_assignment = lpartids[0]
    else: iter_assignment = 0
    iter_assignment = wrap_mpi_bcast(iter_assignment, Blockdata["main_node"])
    # Per-rank [start, end) particle ranges, known on every rank.
    proc_list = [[None, None] for iproc in xrange(Blockdata["nproc"])]
    for iproc in xrange(Blockdata["nproc"]):
        iproc_image_start, iproc_image_end = MPI_start_end(Tracker["total_stack"], Blockdata["nproc"], iproc)
        proc_list[iproc] = [iproc_image_start, iproc_image_end]
    compute_noise(Tracker["nxinit_refinement"])
    rdata = downsize_data_for_rec3D(original_data, Tracker["nxinit_refinement"], return_real = False, npad = 1)
    update_rdata_assignment(iter_assignment, proc_list, Blockdata["myid"], rdata)
    Tracker["nxinit"] = Tracker["nxinit_refinement"]
    compute_noise(Tracker["nxinit"])
    do3d_sorting_groups_fsc_only_iter(rdata, sparamstructure, snorm_per_particle, iteration = iter_id)
    del rdata
    if( Blockdata["myid"] == Blockdata["main_node"]):
        msg = "reconstruction with refinement window_size %d finishes"%Tracker["nxinit_refinement"]
        print(msg)
        log.add(msg)
    if( Blockdata["myid"] == Blockdata["main_node"]):
        # Two FSC curves (chunk 0/1) per group, appended in group order.
        fsc_data = []
        for igroup in xrange(Tracker["number_of_groups"]):
            for ichunk in xrange(2):
                tmp_fsc_data = read_text_file(os.path.join(Tracker["directory"], "fsc_driver_chunk%d_grp%03d_iter%03d.txt"%(ichunk, igroup, iter_id)), -1)
                fsc_data.append(tmp_fsc_data[0])
    else: fsc_data = 0
    fsc_data = wrap_mpi_bcast(fsc_data, Blockdata["main_node"])
    avg_fsc = [0.0 for i in xrange(len(fsc_data[0]))]
    avg_fsc[0] = 1.0
    for igroup in xrange(1): # Use group zero first
        for ifreq in xrange(1, len(fsc_data[0])):avg_fsc[ifreq] += fsc_data[igroup][ifreq]
    fsc143 = len(fsc_data[0])
    for igroup in xrange(Tracker["number_of_groups"]*2):
        for ifreq in xrange(1, len(fsc_data[igroup])):
            # Rescale half-set FSC to full-set FSC, then find the 0.143 cutoff.
            fsc_data[igroup][ifreq] = 2.*fsc_data[igroup][ifreq]/(1.+fsc_data[igroup][ifreq])
            if fsc_data[igroup][ifreq] < 0.143: break
        fsc143 = min(fsc143, ifreq)  # smallest cutoff over all groups/chunks
    if fsc143 !=0: nxinit = int(fsc143)*2
    else: ERROR("program obtains wrong image size", "EQKmeans_by_dmatrix_orien_groups", 1, Blockdata["myid"])
    if(Blockdata["myid"] == Blockdata["main_node"]): write_text_file(avg_fsc, os.path.join(Tracker["directory"], "fsc_image_size.txt"))
    del iter_assignment
    del proc_list
    del fsc_data
    del avg_fsc
    return nxinit
def compute_noise(image_size):
    """Populate Blockdata["bckgnoise"] and Blockdata["unrolldata"] for a window size.

    With Tracker["applybckgnoise"] (noise model from SPARX refinement) the 2-D
    noise image Tracker["bckgnoise"] is read and converted row by row into 1-D
    power lines (negative values clamped to 0.0), each unrolled to the Fourier
    size of an image_size x image_size window.  Otherwise flat (all-ones) noise
    of the appropriate Fourier width is installed.
    """
    global Tracker, Blockdata
    from utilities import get_im, model_blank
    from fundamentals import fft
    # Fourier transform of a blank window tells us the padded Fourier dimensions.
    fourier_blank = fft(model_blank(image_size, image_size))
    if Tracker["applybckgnoise"]: # from SPARX refinement only
        tsd = get_im(Tracker["bckgnoise"]) # invert power spectrum
        nnx = tsd.get_xsize()
        nny = tsd.get_ysize()
        ny = fourier_blank.get_ysize()
        Blockdata["bckgnoise"] = []
        for row in xrange(nny):
            # Keep only positive entries; everything else becomes 0.0.
            prj_line = [tsd.get_value_at(col, row) if tsd.get_value_at(col, row) > 0.0 else 0.0 for col in xrange(nnx)]
            Blockdata["bckgnoise"].append(prj_line)
        Blockdata["unrolldata"] = [Util.unroll1dpw(ny, prj_line) for prj_line in Blockdata["bckgnoise"]]
    else: # from datastack and relion
        nx = fourier_blank.get_xsize()
        ny = fourier_blank.get_ysize()
        Blockdata["bckgnoise"] = [1.0]*nx
        Blockdata["unrolldata"] = Util.unroll1dpw(ny, nx*[1.0])
    return
####------major procedure
def do_EQKmeans_nways_clustering_stable_seeds(workdir, initial_partids, params, sort_res, log_main):
    """Top-level sorting driver: iteratively split the data set with EQKmeans.

    Per generation: run Tracker["constants"]["indep_runs"] independent EQKmeans
    clusterings, keep the reproducible ("accounted") members via two-way
    comparison, reassign the unaccounted particles twice, append the resulting
    qlist to sort_res, and continue with the remaining unaccounted particles
    until fewer than one group's worth remains.  Returns the final_list of the
    last generation on all ranks.  Collective: must be called on all ranks.

    NOTE(review): this function's indentation was reconstructed; statement
    groupings flagged with NOTE comments below should be confirmed upstream.
    """
    global Tracker, Blockdata
    from utilities import read_text_file, wrap_mpi_bcast, write_text_file
    import copy
    import shutil
    from math import sqrt
    less_than_random_assignment = 0
    # Iteratively using EQKmeans to split a dataset into clusters of equal size till the number of
    # the unaccounted is less than the minimum size of a cluster
    # input: initial_partids
    keepgoing = 1
    line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
    if Blockdata["myid"] == Blockdata["main_node"]:
        msg = "----->>>do_EQKmeans_nways_clustering<<<------"
        print(line, msg)
        log_main.add(msg)
        Tracker["unaccounted_list"] = read_text_file(initial_partids, -1) # read all columns
        if len(Tracker["unaccounted_list"])>1: Tracker["unaccounted_list"] = Tracker["unaccounted_list"][1] # two column entries
        else: Tracker["unaccounted_list"] = Tracker["unaccounted_list"][0] # only one column
    else: Tracker["unaccounted_list"] = 0
    Tracker["unaccounted_list"] = wrap_mpi_bcast(Tracker["unaccounted_list"], Blockdata["main_node"], MPI_COMM_WORLD)
    generation = 0
    Tracker["total_stack"] = len(Tracker["unaccounted_list"])
    Tracker["number_of_groups"] = get_number_of_groups(Tracker["total_stack"],Tracker["img_per_grp"])
    partids = initial_partids
    angle_step = Tracker["angle_step"]
    if Tracker["number_of_groups"]>1: # In case the number of the input particles is small
        while Tracker["number_of_groups"] >1:
            Tracker["min_orien_group_size"] = Tracker["number_of_groups"]*Tracker["minimum_ptl_number"]
            if Blockdata["myid"] == Blockdata["main_node"]:
                Tracker["partition_list"] = []
                Tracker["directory"] = os.path.join(workdir, "generation%03d"%generation)
                #cmd="{} {}".format("mkdir",Tracker["directory"])
                os.mkdir(Tracker["directory"])
                cmd="{} {}".format("mkdir",os.path.join(Tracker["directory"], "tempdir"))
                os.mkdir(os.path.join(Tracker["directory"], "tempdir"))
                log_main.add("-------->>> generation %5d"%generation)
                log_main.add("number of images per group: %d"%Tracker["img_per_grp"])
                log_main.add("the initial number of groups: %d number of independent runs: %d"%(Tracker["number_of_groups"], Tracker["constants"]["indep_runs"]))
            else: Tracker = 0
            # Tracker was zeroed on non-main ranks above; re-sync everywhere.
            Tracker = wrap_mpi_bcast(Tracker, Blockdata["main_node"], MPI_COMM_WORLD)
            create_nrandom_lists(partids)
            ptls_in_orien_groups = get_orien_assignment_mpi(angle_step, partids, params, log_main)
            Tracker["nxinit"] = Tracker["nxinit_refinement"]
            previous_params = Tracker["previous_parstack"]
            # Pairwise agreement between the random initial assignments; used to
            # reject assignments that are suspiciously non-random.
            partition_score_dict = compute_pairwise_agreement_ratio(Tracker["random_assignment"], Tracker["number_of_groups"])
            if Blockdata["myid"] == Blockdata["main_node"]:
                Tracker["output"].append("--->>>agreement ratio among the %d intitial random assignments<<<-----"%len(Tracker["random_assignment"]))
                nscore = 0
                tmp_sum = 0.0
                tmp_sum2 = 0.0
                for iptp in xrange(len(Tracker["random_assignment"])-1):
                    for jptp in xrange(iptp+1, len(Tracker["random_assignment"])):
                        msg = " %d %d % 5.2f"%(partition_score_dict[nscore][0], partition_score_dict[nscore][1], partition_score_dict[nscore][2])
                        Tracker["output"].append(msg)
                        tmp_sum +=partition_score_dict[nscore][2]
                        tmp_sum2 +=partition_score_dict[nscore][2]*partition_score_dict[nscore][2]
                        nscore +=1
                tmp_sigma = sqrt(tmp_sum2 - tmp_sum*tmp_sum/(nscore+1.))
                Tracker["output"].append("---> the theoretial agreement ratio at current conditions: %5.2f "%(1./float(Tracker["number_of_groups"])*100.))
                nscore = 0
                for iptp in xrange(len(Tracker["random_assignment"])-1):
                    for jptp in xrange(iptp+1, len(Tracker["random_assignment"])):
                        if abs(partition_score_dict[nscore][2]-1./float(Tracker["number_of_groups"])*100.)> 3.*tmp_sigma:
                            Tracker["output"].append("agreement of random assignment %d and %d is too large, quit"%(iptp, jptp))
                            # NOTE(review): nscore only advances inside this branch, so
                            # partition_score_dict keeps being probed at the same index
                            # when no pair is flagged; the increment probably belongs at
                            # loop level as in the first pass above — confirm upstream.
                            nscore +=1
                            keepgoing = 0
                if keepgoing ==1:Tracker["output"].append("------>initial assignments are all reasonably random")
            else: Tracker = 0
            keepgoing = bcast_number_to_all(keepgoing, Blockdata["main_node"], MPI_COMM_WORLD)
            if keepgoing ==0: ERROR("agreement ratio between two initial assignments is out of 3.0 sigma of the theoretial value %f"%(1./float(Tracker["number_of_groups"])*100.), "check_mpi_settings", 1, Blockdata["myid"])
            Tracker = wrap_mpi_bcast(Tracker, Blockdata["main_node"], MPI_COMM_WORLD)
            original_data, norm_per_particle = read_data_for_sorting(partids, params, previous_params)
            if Tracker["nosmearing"]:
                parameterstructure = None
                Tracker["paramstructure_dict"] = None
                Tracker["paramstructure_dir"] = None
            else: parameterstructure = read_paramstructure_for_sorting(partids, Tracker["paramstructure_dict"], Tracker["paramstructure_dir"])
            Tracker["nxinit"] = get_sorting_image_size(original_data, os.path.join(Tracker["directory"], "independent_index_000.txt"), parameterstructure, norm_per_particle, log_main)
            if Blockdata["myid"] == Blockdata["main_node"]:
                line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
                msg ="image size for sorting under current group size is %d"%Tracker["nxinit"]
                print(line, msg)
                log_main.add(msg)
                if os.path.exists(os.path.join(Tracker["directory"], "tempdir")): shutil.rmtree(os.path.join(Tracker["directory"], "tempdir"))
            npremature = 0
            for indep_run_iter in xrange(0, Tracker["constants"]["indep_runs"]): # N independent runs
                Tracker["indep_run_iter"] = indep_run_iter
                index_file = os.path.join(Tracker["directory"],"independent_index_%03d.txt"%indep_run_iter)
                line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
                if Blockdata["myid"] == Blockdata["main_node"]:
                    Tracker["directory"] = os.path.join(workdir,"generation%03d"%generation, "EQKmeans_%03d"%Tracker["indep_run_iter"])
                    msg = "indep_run_iter %d"%indep_run_iter
                    print(line, msg)
                    log_main.add(msg)
                    if not os.path.exists(Tracker["directory"]):os.mkdir(Tracker["directory"])
                    if not os.path.exists(os.path.join(Tracker["directory"], "tempdir")): os.mkdir(os.path.join(Tracker["directory"], "tempdir"))
                Tracker = wrap_mpi_bcast(Tracker, Blockdata["main_node"], MPI_COMM_WORLD)
                tmp_final_list, premature = EQKmeans_by_dmatrix_orien_groups(original_data, index_file, ptls_in_orien_groups, parameterstructure, norm_per_particle, clean_volumes= True)
                Tracker["directory"] = os.path.join(workdir, "generation%03d"%generation)
                if Blockdata["myid"] == Blockdata["main_node"]: Tracker["partition_list"].append(Tracker["partition"])
                if premature ==1: npremature +=1
            Tracker["partition_list"] = wrap_mpi_bcast(Tracker["partition_list"], Blockdata["main_node"], MPI_COMM_WORLD)
            do_two_way_comparison_over_nindepruns(log_main)
            Tracker = wrap_mpi_bcast(Tracker, Blockdata["main_node"], MPI_COMM_WORLD)
            if Blockdata["myid"] == Blockdata["main_node"]:
                Tracker["directory"] = os.path.join(workdir, "generation%03d"%generation, "Reassign_from_EQKmeans")
                os.mkdir(Tracker["directory"])
                msg = "accounted ratio %f for %d independent EQKmeans "%((len(Tracker["accounted_list"])/float(Tracker["total_stack"])*100.), Tracker["constants"]["indep_runs"])
                log_main.add(msg)
                print(msg)
                if npremature !=0: msg = " premature runs %d out of %d independent runs"%(npremature, Tracker["constants"]["indep_runs"])
                else: msg =" %d EQKmeans runs all end up with mature stop"%Tracker["constants"]["indep_runs"]
                log_main.add(msg)
                print(msg)
                Tracker["output"].append("Generation %3d accounted ratio %5.3f "%(generation, len(Tracker["accounted_list"])/float(Tracker["total_stack"])*100.))
                if len(Tracker["accounted_list"])/float(Tracker["total_stack"]) < 1./float(Tracker["number_of_groups"]): less_than_random_assignment =1
            less_than_random_assignment = bcast_number_to_all(less_than_random_assignment, Blockdata["main_node"], MPI_COMM_WORLD)
            if less_than_random_assignment ==1: ERROR("accounted ratio less than accounted ratio of random assignment ", "sxsort3d.py", 1, Blockdata["myid"])
            tmp_list = []
            npremature = 0
            for riter in xrange(2): # Set it to two for the time being
                assign_list = resize_groups_from_stable_members_mpi(Tracker["Accounted_on_disk"], Tracker["Unaccounted_on_disk"])
                Tracker["directory"] = os.path.join(workdir,"generation%03d"%generation,"Reassign_from_EQKmeans", "EQKmeans_random%d"%riter)
                line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
                if Blockdata["myid"] == Blockdata["main_node"]:
                    msg ="------>>>Reassign unaccounted ones to accounted in groups %d <<<-------"%riter
                    print(line, msg)
                    log_main.add(msg)
                    if npremature>0: msg = " premature runs %d out of two runs"%npremature
                    else: msg = "mature stop"
                    print(line, msg)
                    log_main.add(msg)
                    os.mkdir(Tracker["directory"])
                    write_text_file(assign_list[0], os.path.join(Tracker["directory"], "random_list.txt"))
                index_file = os.path.join(Tracker["directory"], "random_list.txt")
                tmp_final_list, premature = EQKmeans_by_dmatrix_orien_groups(original_data, index_file, ptls_in_orien_groups, parameterstructure, norm_per_particle, clean_volumes= True)
                if premature ==1: npremature +=1
                # Split [id, group] rows into two parallel columns.
                newlist = [[],[]]
                for im in xrange(len(tmp_final_list)):
                    newlist[0].append(tmp_final_list[im][0])
                    newlist[1].append(tmp_final_list[im][1])
                tmp_list.append(newlist)
                if Blockdata["myid"] == Blockdata["main_node"]: write_text_row(tmp_final_list, os.path.join(workdir, "generation%03d"%generation, "Reassign_from_EQKmeans", "res_list%d.txt"%riter))
            Tracker["directory"] = os.path.join(workdir,"generation%03d"%generation, "Reassign_from_EQKmeans")
            ratio, matched_pairs, clusters = compare_two_iterations(tmp_list[0][0], tmp_list[1][0], Tracker["number_of_groups"])
            if Blockdata["myid"] == Blockdata["main_node"]:
                line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
                final_list = []
                pall = read_text_file(os.path.join(workdir, "generation%03d"%generation, "independent_index_000.txt"), -1)
                for ic in xrange(len(clusters)):
                    any = clusters[ic]
                    any.tolist()
                    orig_any = []
                    # Map cluster member indices back to original particle ids.
                    for a in any: orig_any.append(pall[1][a])
                    final_list.append(orig_any)
                    # NOTE(review): the %ic is applied to the joined path string; this
                    # works because the %d template sits at the end of the path.
                    write_text_file(orig_any, os.path.join(Tracker["directory"], "ncluster%d.txt")%ic)
                plist, qlist, ulist = clusters_to_plist(clusters, pall[1])
                sort_res.append(qlist)
                msg = "the reproducible ratio after re-assignment %f"%(ratio*100.)
                Tracker["output"].append("Generation %3d accounted ratio after reassignment %5.3f "%(generation, ratio*100.))
                print(line, msg)
                log_main.add(msg)
                write_text_file( plist, os.path.join(Tracker["directory"], "plist.txt"))
                write_text_file( ulist, os.path.join(Tracker["directory"], "Unaccounted.txt"))
            else:
                final_list = 0
                qlist = 0
            final_list = wrap_mpi_bcast(final_list, Blockdata["main_node"], MPI_COMM_WORLD)
            qlist = wrap_mpi_bcast(qlist, Blockdata["main_node"], MPI_COMM_WORLD)
            Tracker["Unaccounted_on_disk"] = os.path.join(Tracker["directory"], "Unaccounted.txt") # the input for the next round iteration
            # resample to particle original data size from stable members
            partids = Tracker["Unaccounted_on_disk"]
            if Blockdata["myid"] == Blockdata["main_node"]:
                line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
                msg = "Smallest group size %d "%Tracker["constants"]["minimum_grp_size"]
                log_main.add(msg)
                print(line, msg)
                msg = "the final number of groups: %d"%Tracker["number_of_groups"]
                Tracker["total_stack"] = len(read_text_row(partids))
                Tracker["number_of_groups"] = get_number_of_groups(Tracker["total_stack"], Tracker["img_per_grp"])
                msg ="Intermediate results are reconstructed in directory %s"%Tracker["directory"]
                print(msg)
                log_main.add(msg)
                tmpdir = os.path.join(Tracker["directory"], "tempdir")
                if os.path.exists(tmpdir): shutil.rmtree(tmpdir)
            Tracker = wrap_mpi_bcast(Tracker, Blockdata["main_node"], MPI_COMM_WORLD)
            generation +=1
            del original_data
            del parameterstructure
            del norm_per_particle
    else:
        if Blockdata["myid"] == Blockdata["main_node"]:
            msg ="The total number of particles is less than number_of_particles_per_group"
            print(line, msg)
            log_main.add(msg)
            # NOTE(review): 'exsists' is a typo for 'exists' -- this line raises
            # AttributeError if ever reached; also final_list is never assigned
            # on the main node in this branch before the broadcast below.
            if os.path.exsists(os.path.join(Tracker["directory"], "tempdir")): shutil.rmtree(os.path.join(Tracker["directory"], "tempdir"))
        else: final_list = 0
        final_list = wrap_mpi_bcast(final_list, Blockdata["main_node"], MPI_COMM_WORLD)
        # NOTE(review): these names are only created inside the while-loop branch;
        # deleting them here would raise NameError -- confirm intent upstream.
        del original_data
        del parameterstructure
        del norm_per_particle
    return final_list # the result of the last iteration
### EQKmeans
def EQKmeans_by_dmatrix_orien_groups(original_data, partids, ptls_in_orien_groups, paramstructure, norm_per_particle, clean_volumes = False):
    """Equal-size K-means clustering constrained within orientation groups.

    Returns (Tracker["partition"], premature) on all ranks; premature is 1 when
    the iteration stopped above the configured stop percentage.

    NOTE(review): the main K-means iteration loop of this function is
    garbled/truncated in this copy of the file (see the marked lines below);
    restore it from the upstream release before relying on this text.
    """
    global Tracker, Blockdata
    import shutil
    #<<<<---------- >>>>EQKmeans starts<<<<------------
    log = Logger()
    log = Logger(BaseLogger_Files())
    log.prefix = Tracker["directory"]+"/"
    total_stack = Tracker["total_stack"]
    premature = 0
    changed_nptls = 100.0  # percentage of particles that changed group in the last iteration
    number_of_groups = Tracker["number_of_groups"]
    nima = len(original_data)
    image_start, image_end = MPI_start_end(Tracker["total_stack"], Blockdata["nproc"], Blockdata["myid"])
    iter_max = Tracker["total_number_of_iterations"]//3
    stopercnt = 3.  # stop when fewer than this percentage of particles change group
    total_iter = 0
    require_check_setting = False
    partial_rec3d = False
    best_score = 100.0
    best_assignment = []
    # 3-D mask decimated to the sorting window size.
    if Tracker["mask3D"]:
        mask3D = get_im(Tracker["mask3D"])
        if mask3D.get_xsize() != Tracker["nxinit"]: mask3D = fdecimate(mask3D, Tracker["nxinit"], Tracker["nxinit"], Tracker["nxinit"], True, False)
    else:
        mask3D = model_circle(Tracker["constants"]["radius"], Tracker["constants"]["nnxo"], Tracker["constants"]["nnxo"], Tracker["constants"]["nnxo"])
        mask3D = fdecimate(mask3D, Tracker["nxinit"], Tracker["nxinit"], Tracker["nxinit"], True, False)
    line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
    if( Blockdata["myid"] == Blockdata["main_node"]):
        msg = "------>>>>EQKmeans_by_dmatrix_orien_groups <<<<--------"
        log.add(msg)
        msg = "total_stack: %d number_of_groups: %d nxinit: %d CTF: %s Symmetry group: %s iter_max: %d stop percentage: %f 3-D mask: %s focus mask: %s Comparison method: %s"% \
        (Tracker["total_stack"], Tracker["number_of_groups"], Tracker["nxinit"], Tracker["constants"]["CTF"], \
        Tracker["constants"]["symmetry"], iter_max, stopercnt, Tracker["mask3D"], Tracker["focus3D"], Tracker["constants"]["comparison_method"])
        log.add(msg)
        print(line, msg)
        lpartids = read_text_file(partids, -1)
        if len(lpartids) == 1:
            # No group column present: start from a random assignment.
            iter_assignment = []
            for im in xrange(len(lpartids[0])):iter_assignment.append(randint(0,number_of_groups-1))# simple version
        else: iter_assignment = lpartids[0]
    else: iter_assignment = 0
    iter_assignment = wrap_mpi_bcast(iter_assignment, Blockdata["main_node"])
    # Per-rank [start, end) particle ranges, known on every rank.
    proc_list = [[None, None] for iproc in xrange(Blockdata["nproc"])]
    for iproc in xrange(Blockdata["nproc"]):
        iproc_image_start, iproc_image_end = MPI_start_end(Tracker["total_stack"], Blockdata["nproc"], iproc)
        proc_list[iproc] = [iproc_image_start, iproc_image_end]
    compute_noise(Tracker["nxinit"])
    cdata, rdata = downsize_data_for_sorting(original_data, preshift = True, npad = 1)# pay attentions to shifts!
    srdata = precalculate_shifted_data_for_recons3D(rdata, paramstructure, Tracker["refang"], \
    Tracker["rshifts"], Tracker["delta"], Tracker["avgnorm"], Tracker["nxinit"], Tracker["constants"]["nnxo"], Tracker["nosmearing"], norm_per_particle)
    del rdata
    # NOTE(review): the next line is corrupted in this copy -- the 'while'
    # header and the body of the K-means iteration (group reconstructions and
    # particle reassignment producing res_sort3d and changed_nptls) are
    # missing between it and the bookkeeping below.
    while total_iter= changed_nptls:
        best_score = changed_nptls
        best_assignment = copy.copy(iter_assignment)
        if changed_nptls < stopercnt and total_iter <=10:stopercnt = changed_nptls/2. # reduce stop criterion to gain improvement in clustering
        stopercnt = max(stopercnt, Tracker["constants"]["stop_eqkmeans_percentage"]) # But not exceed the specified number
        iter +=1
        total_iter +=1
        if changed_nptls < stopercnt: break
    # NOTE(review): comparison operator lost here (likely '> 15.0').
    if changed_nptls 15.0: require_check_setting = True
    if(Blockdata["myid"] == Blockdata["main_node"]):
        line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
        if best_score > Tracker["constants"]["stop_eqkmeans_percentage"]:
            msg ="EQKmeans premature stop with changed particles ratio %f and image size %d"%(best_score,Tracker["nxinit"])
            premature = 1
        else: msg = "EQKmeans mature stop with changed particles ratio %f within %d iterations and actually used stop percentage is %f"%(\
        best_score, total_iter, stopercnt)
        print(line, msg)
        log.add(msg)
        # res_sort3d is produced by the (missing) iteration loop above.
        Tracker["partition"], ali3d_params_list = parsing_sorting_params(partids, res_sort3d)
        write_text_row(Tracker["partition"], os.path.join(Tracker["directory"],"list.txt"))
        shutil.rmtree(os.path.join(Tracker["directory"], "tempdir"))
        if clean_volumes:
            # Remove the per-iteration group volumes written during clustering.
            for jter in xrange(total_iter):
                for igroup in xrange(Tracker["number_of_groups"]): os.remove(os.path.join(Tracker["directory"], "vol_grp%03d_iter%03d.hdf"%(igroup,jter)))
    else:Tracker["partition"] = 0
    Tracker["partition"] = wrap_mpi_bcast(Tracker["partition"], Blockdata["main_node"])
    premature = wrap_mpi_bcast(premature, Blockdata["main_node"])
    if require_check_setting:
        if(Blockdata["myid"] == Blockdata["main_node"]): print("Too large changed particles, and the sorting settings, such as img_per_grp requires a check")
    return Tracker["partition"], premature
### various reading data
### 1
def get_shrink_data_sorting(partids, partstack, return_real = False, preshift = True, apply_mask = True, npad = 1):
    # The function will read from stack a subset of images specified in partids
    # and assign to them parameters from partstack with optional CTF application and shifting of the data.
    # So, the lengths of partids and partstack are the same.
    # The read data is properly distributed among MPI threads.
    # 10142015 --- preshift is set to True when doing 3-D sorting.
    # chunk_id are set when data is read in
    """Read, normalize and Fourier-decimate the particles listed in partids.

    partids   -- text file with particle ids (1 column) or group+id (2 columns)
    partstack -- text file with projection parameters per particle
                 (phi, theta, psi, sx, sy, chunk_id[, particle_group])
    Returns the list of local images (Fourier format unless return_real).
    Collective: must be called on all ranks.
    """
    global Tracker, Blockdata
    from utilities import wrap_mpi_bcast, read_text_row
    from fundamentals import resample, fshift
    from filter import filt_ctf
    from applications import MPI_start_end
    from EMAN2 import Region
    line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
    if( Blockdata["myid"] == Blockdata["main_node"]): print(line,"get_shrink_data_sorting")
    mask2D = model_circle(Tracker["constants"]["radius"],Tracker["constants"]["nnxo"],Tracker["constants"]["nnxo"])
    shrinkage = Tracker["nxinit"]/float(Tracker["constants"]["nnxo"])
    radius = int(Tracker["constants"]["radius"] * shrinkage +0.5)
    if( Blockdata["myid"] == Blockdata["main_node"]):
        lpartids = read_text_file(partids, -1)
        if len(lpartids) == 1:
            lpartids = lpartids[0]
            groupids = len(lpartids)*[-1]  # no group column: mark as unassigned
        else:
            groupids = lpartids[0]
            lpartids = lpartids[1]
    else:
        lpartids = 0
        groupids = 0
    lpartids = wrap_mpi_bcast(lpartids, Blockdata["main_node"])
    groupids = wrap_mpi_bcast(groupids, Blockdata["main_node"])
    Tracker["total_stack"] = len(lpartids)
    if(Blockdata["myid"] == Blockdata["main_node"]): partstack = read_text_row(partstack)
    else: partstack = 0
    partstack = wrap_mpi_bcast(partstack, Blockdata["main_node"])
    if(Tracker["total_stack"] < Blockdata["nproc"]): ERROR("Wrong MPI settings!", "get_shrink_data_sorting", 1, Blockdata["myid"])
    else: image_start, image_end = MPI_start_end(Tracker["total_stack"], Blockdata["nproc"], Blockdata["myid"])
    lpartids = lpartids[image_start:image_end]
    groupids = groupids[image_start:image_end]
    nima = image_end - image_start
    data = [None]*nima
    for im in xrange(nima):
        data[im] = get_im(Tracker["constants"]["orgstack"], lpartids[im])
        # Rows may carry 7 columns (with particle_group) or only 6.
        try: phi, theta, psi, sx, sy, chunk_id, particle_group_id = partstack[lpartids[im]][0], partstack[lpartids[im]][1],\
            partstack[lpartids[im]][2], partstack[lpartids[im]][3], partstack[lpartids[im]][4], partstack[lpartids[im]][5], partstack[lpartids[im]][6]
        except IndexError: phi, theta, psi, sx, sy, chunk_id, particle_group_id = partstack[lpartids[im]][0], partstack[lpartids[im]][1],\
            partstack[lpartids[im]][2], partstack[lpartids[im]][3], partstack[lpartids[im]][4], partstack[lpartids[im]][5], -1
        if preshift:# always true
            data[im] = fshift(data[im],sx,sy)
            sx = 0.0
            sy = 0.0
        # Normalize to zero mean / unit variance outside the particle radius.
        st = Util.infomask(data[im], mask2D, False)
        data[im] -= st[0]
        data[im] /= st[1]
        if apply_mask: data[im] = cosinemask(data[im],radius = Tracker["constants"]["radius"])
        # FT
        data[im] = fft(data[im])
        if Tracker["constants"]["CTF"]:
            ctf_params = data[im].get_attr("ctf")
            data[im] = fdecimate(data[im], Tracker["nxinit"]*npad, Tracker["nxinit"]*npad, 1, False, False)
            ctf_params.apix = ctf_params.apix/shrinkage
            data[im].set_attr('ctf', ctf_params)
            data[im].set_attr('ctf_applied', 0)
            if return_real : data[im] = fft(data[im])
        else:
            ctf_params = data[im].get_attr_default("ctf", False)
            if ctf_params:
                ctf_params.apix = ctf_params.apix/shrinkage
                data[im].set_attr('ctf', ctf_params)
                data[im].set_attr('ctf_applied', 0)
            # Bug fix: 'nxinit' was an undefined local name here (NameError);
            # the target size is Tracker["nxinit"] as in the CTF branch.
            data[im] = fdecimate(data[im], Tracker["nxinit"]*npad, Tracker["nxinit"]*npad, 1, True, False)
            apix = Tracker["constants"]["pixel_size"]
            data[im].set_attr('apix', apix/shrinkage)
        if not return_real: data[im].set_attr("padffted",1)
        data[im].set_attr("npad",npad)
        set_params_proj(data[im],[phi, theta, psi, 0.0, 0.0])
        data[im].set_attr("chunk_id",chunk_id)
        data[im].set_attr("group",groupids[im])
        data[im].set_attr("particle_group", particle_group_id)
        if Tracker["applybckgnoise"]:
            data[im].set_attr("bckgnoise", Blockdata["bckgnoise"][particle_group_id])
            data[im].set_attr("qt", float(Tracker["constants"]["nnxo"]*Tracker["constants"]["nnxo"]))
        else: data[im].set_attr("bckgnoise", Blockdata["bckgnoise"]) # constant list
    return data
###2
def get_shrink_data_sorting_smearing(partids, partstack, return_real = False, preshift = True, apply_mask = True, npad = 1):
    # The function will read from stack a subset of images specified in partids
    # and assign to them parameters from partstack with optional CTF application and shifting of the data.
    # So, the lengths of partids and partstack are the same.
    # The read data is properly distributed among MPI threads.
    # 10142015 --- preshift is set to True when doing 3-D sorting.
    # chunk_id are set when data is read in
    """Smearing variant of get_shrink_data_sorting.

    Identical to get_shrink_data_sorting but expects an eighth parameter column
    (per-particle norm) and returns (data, norm_per_particle).
    Collective: must be called on all ranks.
    """
    global Tracker, Blockdata
    from fundamentals import resample, fshift
    from filter import filt_ctf
    from applications import MPI_start_end
    from EMAN2 import Region
    line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
    if( Blockdata["myid"] == Blockdata["main_node"]): print(line,"get_shrink_data_sorting")
    mask2D = model_circle(Tracker["constants"]["radius"],Tracker["constants"]["nnxo"],Tracker["constants"]["nnxo"])
    shrinkage = Tracker["nxinit"]/float(Tracker["constants"]["nnxo"])
    radius = int(Tracker["constants"]["radius"] * shrinkage +0.5)
    if( Blockdata["myid"] == Blockdata["main_node"]):
        lpartids = read_text_file(partids, -1)
        if len(lpartids) == 1:
            lpartids = lpartids[0]
            groupids = len(lpartids)*[-1]  # no group column: mark as unassigned
        else:
            groupids = lpartids[0]
            lpartids = lpartids[1]
    else:
        lpartids = 0
        groupids = 0
    lpartids = wrap_mpi_bcast(lpartids, Blockdata["main_node"])
    groupids = wrap_mpi_bcast(groupids, Blockdata["main_node"])
    Tracker["total_stack"] = len(lpartids)
    if(Blockdata["myid"] == Blockdata["main_node"]): partstack = read_text_row(partstack)
    else: partstack = 0
    partstack = wrap_mpi_bcast(partstack, Blockdata["main_node"])
    if(Tracker["total_stack"] < Blockdata["nproc"]): ERROR("Wrong MPI settings!", "get_shrink_data_sorting", 1, Blockdata["myid"])
    else: image_start, image_end = MPI_start_end(Tracker["total_stack"], Blockdata["nproc"], Blockdata["myid"])
    lpartids = lpartids[image_start:image_end]
    groupids = groupids[image_start:image_end]
    nima = image_end - image_start
    data = [None]*nima
    norm_per_particle = []
    for im in xrange(nima):
        data[im] = get_im(Tracker["constants"]["orgstack"], lpartids[im])
        # Rows may carry 8 columns (with particle_group and norm) or only 6.
        try: phi, theta, psi, sx, sy, chunk_id, particle_group_id, norm = partstack[lpartids[im]][0], partstack[lpartids[im]][1],\
            partstack[lpartids[im]][2], partstack[lpartids[im]][3], partstack[lpartids[im]][4], partstack[lpartids[im]][5], partstack[lpartids[im]][6], partstack[lpartids[im]][7]
        except IndexError: phi, theta, psi, sx, sy, chunk_id, particle_group_id, norm = partstack[lpartids[im]][0], partstack[lpartids[im]][1],\
            partstack[lpartids[im]][2], partstack[lpartids[im]][3], partstack[lpartids[im]][4], partstack[lpartids[im]][5], -1, 1
        if preshift:# always true
            data[im] = fshift(data[im],sx,sy)
            sx = 0.0
            sy = 0.0
        # Normalize to zero mean / unit variance outside the particle radius.
        st = Util.infomask(data[im], mask2D, False)
        data[im] -= st[0]
        data[im] /= st[1]
        if apply_mask: data[im] = cosinemask(data[im],radius = Tracker["constants"]["radius"])
        # FT
        data[im] = fft(data[im])
        if Tracker["constants"]["CTF"] :
            ctf_params = data[im].get_attr("ctf")
            data[im] = fdecimate(data[im], Tracker["nxinit"]*npad, Tracker["nxinit"]*npad, 1, False, False)
            ctf_params.apix = ctf_params.apix/shrinkage
            data[im].set_attr('ctf', ctf_params)
            data[im].set_attr('ctf_applied', 0)
            if return_real : data[im] = fft(data[im])
        else:
            ctf_params = data[im].get_attr_default("ctf", False)
            if ctf_params:
                ctf_params.apix = ctf_params.apix/shrinkage
                data[im].set_attr('ctf', ctf_params)
                data[im].set_attr('ctf_applied', 0)
            # Bug fix: 'nxinit' was an undefined local name here (NameError);
            # the target size is Tracker["nxinit"] as in the CTF branch.
            data[im] = fdecimate(data[im], Tracker["nxinit"]*npad, Tracker["nxinit"]*npad, 1, True, False)
            apix = Tracker["constants"]["pixel_size"]
            data[im].set_attr('apix', apix/shrinkage)
        if not return_real: data[im].set_attr("padffted",1)
        data[im].set_attr("npad",npad)
        set_params_proj(data[im],[phi, theta, psi, 0.0, 0.0])
        data[im].set_attr("chunk_id",chunk_id)
        data[im].set_attr("group",groupids[im])
        data[im].set_attr("particle_group", particle_group_id)
        if Tracker["applybckgnoise"]:
            data[im].set_attr("bckgnoise", Blockdata["bckgnoise"][particle_group_id])
            data[im].set_attr("qt", float(Tracker["constants"]["nnxo"]*Tracker["constants"]["nnxo"]))
        else: data[im].set_attr("bckgnoise", Blockdata["bckgnoise"]) # constant list
        norm_per_particle.append(norm)
    return data, norm_per_particle
###3
def get_data_prep_compare_rec3d(partids, partstack, return_real = False, preshift = True, npad = 1):
	# The function will read from stack a subset of images specified in partids
	# and assign to them parameters from partstack with optional CTF application and shifting of the data.
	# So, the lengths of partids and partstack are the same.
	# Returns two aligned lists:
	#   1. cdata: data for image comparison, always in Fourier format
	#   2. rdata: data for reconstruction, 4nn return real image
	global Tracker, Blockdata
	from fundamentals import resample, fshift, fft
	from filter import filt_ctf
	from applications import MPI_start_end
	from EMAN2 import Region
	from utilities import model_circle, wrap_mpi_bcast, get_im, model_blank, set_params_proj
	# NOTE(review): filt_table and model_gauss_noise are used below but not
	# imported locally; presumably provided by module-level imports -- confirm.
	line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
	if( Blockdata["myid"] == Blockdata["main_node"]): print(line,"read_data in ")
	# full-size circular mask used for the normalization statistics
	mask2D = model_circle(Tracker["constants"]["radius"],Tracker["constants"]["nnxo"],Tracker["constants"]["nnxo"])
	shrinkage = Tracker["nxinit"]/float(Tracker["constants"]["nnxo"])
	if Tracker["applybckgnoise"]:
		# per-group 1/sqrt(noise power) tables, used to colour substituted noise
		oneover = []
		nnx = len(Blockdata["bckgnoise"][0])
		for i in xrange(len(Blockdata["bckgnoise"])):
			temp = [0.0]*nnx
			for k in xrange(nnx):
				if(Blockdata["bckgnoise"][i][k] > 0.0): temp[k] = 1.0/sqrt(Blockdata["bckgnoise"][i][k])
			oneover.append(temp)
		del temp
	# main node reads the id list: one column -> ids only, two columns -> [group ids, ids]
	if( Blockdata["myid"] == Blockdata["main_node"]):
		lpartids = read_text_file(partids, -1)
		if len(lpartids) == 1:
			lpartids = lpartids[0]
			groupids = len(lpartids)*[-1]
		else:
			groupids = lpartids[0]
			lpartids = lpartids[1]
	else:
		lpartids = 0
		groupids = 0
	# MPI collectives: keep this call order identical on all ranks
	lpartids = wrap_mpi_bcast(lpartids, Blockdata["main_node"])
	groupids = wrap_mpi_bcast(groupids, Blockdata["main_node"])
	Tracker["total_stack"] = len(lpartids)
	if(Blockdata["myid"] == Blockdata["main_node"]): partstack = read_text_row(partstack)
	else: partstack = 0
	partstack = wrap_mpi_bcast(partstack, Blockdata["main_node"])
	if(Tracker["total_stack"] < Blockdata["nproc"]):
		ERROR("number of processors in use is larger than the total number of particles", \
		"get_data_and_prep", 1, Blockdata["myid"])
	else: image_start, image_end = MPI_start_end(Tracker["total_stack"], Blockdata["nproc"], Blockdata["myid"])
	# this rank processes [image_start, image_end)
	lpartids = lpartids[image_start:image_end]
	groupids = groupids[image_start:image_end]
	if Tracker["focus3D"]: # focus mask is applied
		if Blockdata["myid"] == Blockdata["main_node"]:
			focus3d = get_im(Tracker["focus3D"])
			focus3d_nx = focus3d.get_xsize()
			if focus3d_nx != Tracker["constants"]["nnxo"]: # So the decimated focus volume can be directly used
				focus3d = resample(focus3d, float(Tracker["constants"]["nnxo"])/float(focus3d_nx))
		else: focus3d = model_blank(Tracker["constants"]["nnxo"], Tracker["constants"]["nnxo"], Tracker["constants"]["nnxo"])
		bcast_EMData_to_all(focus3d, Blockdata["myid"], Blockdata["main_node"])
		focus3d = prep_vol(focus3d, 1, 1)
	# Preprocess the data
	nima = image_end - image_start
	cdata = [None]*nima
	rdata = [None]*nima
	for im in xrange(nima):
		image = get_im(Tracker["constants"]["orgstack"], lpartids[im])
		# parameter rows may lack the particle_group column; default it to -1
		try: phi, theta, psi, sx, sy, chunk_id, particle_group_id = partstack[lpartids[im]][0], partstack[lpartids[im]][1], partstack[lpartids[im]][2], \
			partstack[lpartids[im]][3], partstack[lpartids[im]][4], partstack[lpartids[im]][5], partstack[lpartids[im]][6]
		except IndexError: # narrowed from bare except: only a short row is expected here
			phi, theta, psi, sx, sy, chunk_id, particle_group_id = partstack[lpartids[im]][0], partstack[lpartids[im]][1], partstack[lpartids[im]][2], \
			partstack[lpartids[im]][3], partstack[lpartids[im]][4], partstack[lpartids[im]][5], -1
		if preshift:# always true
			image = fshift(image,sx,sy)
			sx = 0.0
			sy = 0.0
		# normalize: zero mean, unit variance within the particle-radius mask
		st = Util.infomask(image, mask2D, False)
		image -= st[0]
		image /= st[1]
		cimage = image.copy()
		if Tracker["applybckgnoise"]:
			if Tracker["applymask"]:
				if Tracker["constants"]["hardmask"]: cimage = cosinemask(cimage, radius = Tracker["constants"]["radius"])
				else:
					# soft mask: substitute coloured gaussian noise outside the radius
					bckg = model_gauss_noise(1.0,Tracker["constants"]["nnxo"]+2,Tracker["constants"]["nnxo"])
					bckg.set_attr("is_complex",1)
					bckg.set_attr("is_fftpad",1)
					bckg = fft(filt_table(bckg, oneover[particle_group_id]))
					# Normalize bckg noise in real space, only region actually used.
					st = Util.infomask(bckg, mask2D, False)
					bckg -= st[0]
					bckg /= st[1]
					cimage = cosinemask(cimage,radius = Tracker["constants"]["radius"], bckg = bckg)
		else:
			if Tracker["applymask"]: cimage = cosinemask(cimage, radius = Tracker["constants"]["radius"])
		# FT
		image = fft(image)
		cimage = fft(cimage)
		if Tracker["constants"]["CTF"] :
			ctf_params = image.get_attr("ctf")
			image = fdecimate(image, Tracker["nxinit"]*npad, Tracker["nxinit"]*npad, 1, False, False)
			cimage = fdecimate(cimage, Tracker["nxinit"]*npad, Tracker["nxinit"]*npad, 1, False, False)
			ctf_params.apix = ctf_params.apix/shrinkage
			image.set_attr('ctf', ctf_params)
			cimage.set_attr('ctf', ctf_params)
			image.set_attr('ctf_applied', 0)
			cimage.set_attr('ctf_applied', 0)
			if return_real:image = fft(image)
		else:
			ctf_params = image.get_attr_default("ctf", False)
			if ctf_params:
				ctf_params.apix = ctf_params.apix/shrinkage
				image.set_attr('ctf', ctf_params)
				image.set_attr('ctf_applied', 0)
				cimage.set_attr('ctf', ctf_params)
				cimage.set_attr('ctf_applied', 0)
			# BUGFIX: was bare `nxinit` (undefined in this function -> NameError
			# whenever CTF is off); the CTF branch above uses Tracker["nxinit"].
			image = fdecimate(image, Tracker["nxinit"]*npad, Tracker["nxinit"]*npad, 1, True, False)
			cimage = fdecimate(cimage, Tracker["nxinit"]*npad, Tracker["nxinit"]*npad, 1, True, False)
			apix = Tracker["constants"]["pixel_size"]
			image.set_attr('apix', apix/shrinkage)
			cimage.set_attr('apix', apix/shrinkage)
		# cimage always stays a padded Fourier image
		cimage.set_attr("padffted",1)
		cimage.set_attr("npad", npad)
		if not return_real:
			image.set_attr("padffted",1)
			image.set_attr("npad", npad)
		set_params_proj(image,[phi, theta, psi, 0.0, 0.0])
		image.set_attr("chunk_id", chunk_id)
		image.set_attr("group", groupids[im])
		image.set_attr("particle_group", particle_group_id)
		set_params_proj(cimage,[phi, theta, psi, 0.0, 0.0])
		cimage.set_attr("chunk_id", chunk_id)
		cimage.set_attr("group", groupids[im])
		cimage.set_attr("particle_group", particle_group_id)
		rdata[im] = image
		cdata[im] = cimage
		if Tracker["applybckgnoise"]:
			rdata[im].set_attr("bckgnoise", Blockdata["bckgnoise"][particle_group_id])
			if Tracker["constants"]["comparison_method"] == "cross": Util.mulclreal(cdata[im], Blockdata["unrolldata"][particle_group_id])
		if Tracker["focus3D"]:
			# multiply the comparison image by the binarized focus projection
			cdata[im] = fft(binarize(prgl(focus3d, [phi, theta, psi, 0.0, 0.0], 1, True), 1)*fft(cdata[im]))
			if Tracker["constants"]["CTF"]: cdata[im].set_attr("ctf", rdata[im].get_attr("ctf"))
		cdata[im].set_attr("is_complex",0)
	line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
	if( Blockdata["myid"] == Blockdata["main_node"]): print(line,"reading data finishes")
	return cdata, rdata
#####4
def get_shrink_data_final(nxinit, procid, original_data = None, oldparams = None, \
	return_real = False, preshift = False, apply_mask = True, nonorm = False, npad = 1):
	global Tracker, Blockdata
	"""
	This function will read from stack a subset of images specified in partids
	and assign to them parameters from partstack with optional CTF application and shifting of the data.
	So, the lengths of partids and partstack are the same.
	The read data is properly distributed among MPI threads.
	Flow of data:
	1. Read images, if there is enough memory, keep them as original_data.
	2. Read current params
	3. Apply shift
	4. Normalize outside of the radius
	5. Do noise substitution and cosine mask. (Optional?)
	6. Shrink data.
	7. Apply CTF.
	"""
	#from fundamentals import resample
	from utilities import get_im, model_gauss_noise, set_params_proj, get_params_proj
	from fundamentals import fdecimate, fshift, fft
	from filter import filt_ctf, filt_table
	from applications import MPI_start_end
	from math import sqrt
	# full-size circular mask, used only for normalization statistics
	mask2D = model_circle(Tracker["constants"]["radius"],Tracker["constants"]["nnxo"],Tracker["constants"]["nnxo"])
	nima = len(original_data)
	shrinkage = nxinit/float(Tracker["constants"]["nnxo"])
	# Note these are in Fortran notation for polar searches
	#txm = float(nxinit-(nxinit//2+1) - radius -1)
	#txl = float(2 + radius - nxinit//2+1)
	radius = int(Tracker["constants"]["radius"]*shrinkage + 0.5)
	# txm/txl bound the shrunken shifts so the particle stays inside the small box
	txm = float(nxinit-(nxinit//2+1) - radius)
	txl = float(radius - nxinit//2+1)
	if Blockdata["bckgnoise"] :
		# per-group 1/sqrt(noise power) tables used to colour the substituted noise
		oneover = []
		nnx = Blockdata["bckgnoise"][0].get_xsize()
		for i in xrange(len(Blockdata["bckgnoise"])):
			temp = [0.0]*nnx
			for k in xrange(nnx):
				if( Blockdata["bckgnoise"][i].get_value_at(k) > 0.0): temp[k] = 1.0/sqrt(Blockdata["bckgnoise"][i].get_value_at(k))
			oneover.append(temp)
		del temp
	Blockdata["accumulatepw"][procid] = [None]*nima
	data = [None]*nima
	for im in xrange(nima):
		# wnorm comes from column 7 of the refinement parameters
		phi, theta, psi, sx, sy, wnorm = oldparams[im][0], oldparams[im][1], oldparams[im][2], oldparams[im][3], oldparams[im][4], oldparams[im][7]
		if preshift:
			# apply the integer part of the shift here; residual stays in params
			sx = int(round(sx))
			sy = int(round(sy))
			data[im] = cyclic_shift(original_data[im],sx,sy)
			# Put rounded shifts on the list, note image has the original floats - check whether it may cause problems
			oldparams[im][3] = sx
			oldparams[im][4] = sy
			sx = 0.0
			sy = 0.0
		else: data[im] = original_data[im].copy()
		# normalize: zero mean, unit variance within mask2D
		st = Util.infomask(data[im], mask2D, False)
		data[im] -= st[0]
		data[im] /= st[1]
		if data[im].get_attr_default("bckgnoise", None) : data[im].delete_attr("bckgnoise")
		# Do bckgnoise if exists
		if Blockdata["bckgnoise"]:
			if apply_mask:
				if Tracker["constants"]["hardmask"]:
					data[im] = cosinemask(data[im],radius = Tracker["constants"]["radius"])
				else:
					# soft mask: replace area outside the radius with coloured gaussian noise
					bckg = model_gauss_noise(1.0,Tracker["constants"]["nnxo"]+2,Tracker["constants"]["nnxo"])
					bckg.set_attr("is_complex",1)
					bckg.set_attr("is_fftpad",1)
					bckg = fft(filt_table(bckg, oneover[data[im].get_attr("particle_group")]))
					# Normalize bckg noise in real space, only region actually used.
					st = Util.infomask(bckg, mask2D, False)
					bckg -= st[0]
					bckg /= st[1]
					data[im] = cosinemask(data[im],radius = Tracker["constants"]["radius"], bckg = bckg)
		else:
			# if no bckgnoise, do simple masking instead
			if apply_mask: data[im] = cosinemask(data[im],radius = Tracker["constants"]["radius"] )
		# resample will properly adjusts shifts and pixel size in ctf
		#data[im] = resample(data[im], shrinkage)
		# return Fourier image
		#if npad> 1: data[im] = pad(data[im], Tracker["constants"]["nnxo"]*npad, Tracker["constants"]["nnxo"]*npad, 1, 0.0)
		# Apply varadj
		if not nonorm: Util.mul_scalar(data[im], Tracker["avgvaradj"][procid]/wnorm)
		# FT
		data[im] = fft(data[im])
		# accumulate the rotationally averaged power spectrum of this particle
		sig = Util.rotavg_fourier( data[im] )
		Blockdata["accumulatepw"][procid][im] = sig[len(sig)//2:]+[0.0]
		if Tracker["constants"]["CTF"] :
			# decimate in Fourier space; rescale the ctf pixel size accordingly
			data[im] = fdecimate(data[im], nxinit*npad, nxinit*npad, 1, False, False)
			ctf_params = original_data[im].get_attr("ctf")
			ctf_params.apix = ctf_params.apix/shrinkage
			data[im].set_attr('ctf', ctf_params)
			data[im].set_attr('ctf_applied', 0)
			if return_real: data[im] = fft(data[im])
		else:
			ctf_params = original_data[im].get_attr_default("ctf", False)
			if ctf_params:
				ctf_params.apix = ctf_params.apix/shrinkage
				data[im].set_attr('ctf', ctf_params)
				data[im].set_attr('ctf_applied', 0)
			data[im] = fdecimate(data[im], nxinit*npad, nxinit*npad, 1, True, False)
			apix = Tracker["constants"]["pixel_size"]
			data[im].set_attr('apix', apix/shrinkage)
		# We have to make sure the shifts are within correct range, shrinkage or not
		set_params_proj(data[im],[phi,theta,psi,max(min(sx*shrinkage,txm),txl),max(min(sy*shrinkage,txm),txl)])
		if not return_real: data[im].set_attr("padffted",1)
		data[im].set_attr("npad",npad)
		if Blockdata["bckgnoise"]:
			temp = Blockdata["bckgnoise"][data[im].get_attr("particle_group")]
			### Do not adjust the values, we try to keep everything in the same Fourier values.
			data[im].set_attr("bckgnoise", [temp[i] for i in xrange(temp.get_xsize())])
	return data
###5
def read_data_for_sorting(partids, partstack, previous_partstack):
	# The function will read from stack a subset of images specified in partids
	# and assign to them parameters from partstack with optional CTF application and shifting of the data.
	# So, the lengths of partids and partstack are the same.
	# Returns (data, norm_per_particle): images annotated with projection params,
	# chunk/group ids and previous/current shifts, plus the per-particle norms.
	global Tracker, Blockdata
	from fundamentals import resample, fshift
	from filter import filt_ctf
	from applications import MPI_start_end
	from EMAN2 import Region
	from utilities import wrap_mpi_bcast, read_text_row, get_im, set_params_proj
	# functions:
	# read in data
	line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
	if( Blockdata["myid"] == Blockdata["main_node"]):print(line, "read_data for sorting ")
	# main node reads the id list: one column -> ids only, two columns -> [group ids, ids]
	if( Blockdata["myid"] == Blockdata["main_node"]):
		lpartids = read_text_file(partids, -1)
		if len(lpartids) == 1:
			lpartids = lpartids[0]
			groupids = len(lpartids)*[-1]
		else:
			groupids = lpartids[0]
			lpartids = lpartids[1]
	else:
		lpartids = 0
		groupids = 0
	# MPI collectives: identical call order required on every rank
	lpartids = wrap_mpi_bcast(lpartids, Blockdata["main_node"])
	groupids = wrap_mpi_bcast(groupids, Blockdata["main_node"])
	Tracker["total_stack"] = len(lpartids)
	if(Blockdata["myid"] == Blockdata["main_node"]): partstack = read_text_row(partstack)
	else: partstack = 0
	partstack = wrap_mpi_bcast(partstack, Blockdata["main_node"])
	if(Blockdata["myid"] == Blockdata["main_node"]): previous_partstack = read_text_row(previous_partstack)
	else: previous_partstack = 0
	previous_partstack = wrap_mpi_bcast(previous_partstack, Blockdata["main_node"])
	if(Tracker["total_stack"] < Blockdata["nproc"]): ERROR("number of processors in use is larger than the total number of particles", \
		"get_data_and_prep", 1, Blockdata["myid"])
	else: image_start, image_end = MPI_start_end(Tracker["total_stack"], Blockdata["nproc"], Blockdata["myid"])
	# this rank handles the slice [image_start, image_end)
	lpartids = lpartids[image_start:image_end]
	groupids = groupids[image_start:image_end]
	nima = image_end - image_start
	data = [None]*nima
	norm_per_particle = [ None for im in xrange(nima)]
	#print(Tracker["constants"]["orgstack"])
	for im in xrange(nima):
		image = get_im(Tracker["constants"]["orgstack"], lpartids[im])
		#print(im, Blockdata["myid"])
		# rows without the optional group/norm columns fall back to (-1, 1.)
		try: phi, theta, psi, sx, sy, chunk_id, particle_group_id, mnorm = partstack[lpartids[im]][0], \
			partstack[lpartids[im]][1], partstack[lpartids[im]][2], \
			partstack[lpartids[im]][3], partstack[lpartids[im]][4], partstack[lpartids[im]][5], \
			partstack[lpartids[im]][6], partstack[lpartids[im]][7]
		except:
			phi, theta, psi, sx, sy, chunk_id, particle_group_id, mnorm = partstack[lpartids[im]][0], \
			partstack[lpartids[im]][1], partstack[lpartids[im]][2], \
			partstack[lpartids[im]][3], partstack[lpartids[im]][4], partstack[lpartids[im]][5], -1, 1.
		sx1, sy1 = previous_partstack[lpartids[im]][3], previous_partstack[lpartids[im]][4]
		set_params_proj(image,[phi, theta, psi, 0.0, 0.0])
		image.set_attr("chunk_id", chunk_id)
		image.set_attr("group", groupids[im])
		image.set_attr("particle_group", particle_group_id)
		#image.set_attr("mnorm", mnorm)
		# keep both shift sets on the image; actual shifting happens downstream
		image.set_attr("previous_shifts", [sx1, sy1])
		image.set_attr("current_shifts", [sx, sy])
		norm_per_particle[im] = mnorm
		data[im] = image
	return data, norm_per_particle
###6 read paramstructure
def read_paramstructure_for_sorting(partids, paramstructure_dict_file, paramstructure_dir):
global Tracker, Blockdata
from utilities import read_text_row, read_text_file, wrap_mpi_bcast
from applications import MPI_start_end
if( Blockdata["myid"] == Blockdata["main_node"]):lcore = read_text_file(partids, -1)
else: lcore = 0
lcore = wrap_mpi_bcast(lcore, Blockdata["main_node"])
if len(lcore) == 1: lcore = lcore[0]
else: lcore = lcore[1]
psize = len(lcore)
oldparamstructure = []
im_start, im_end = MPI_start_end(psize, Blockdata["nproc"], Blockdata["myid"])
lcore = lcore[im_start:im_end]
nima = len(lcore)
if( Blockdata["myid"] == Blockdata["main_node"]): tmp_list = read_text_row(paramstructure_dict_file)
else: tmp_list = 0
tmp_list = wrap_mpi_bcast(tmp_list, Blockdata["main_node"])
pdict = {}
for im in xrange(len(lcore)):pdict[im] = tmp_list[lcore[im]]
oldparamstructure = []
nptl = 0
last_old_paramstructure_file = None
while nptl"
if( Blockdata["myid"] == Blockdata["main_node"]):
if not os.path.exists(Tracker["paramstructure_dir"]):
os.mkdir(os.path.join(Tracker["constants"]["masterdir"], "main%03d"%selected_iteration))
os.mkdir(Tracker["paramstructure_dir"])
Tracker["refang"] = read_text_row(os.path.join(old_refinement_iter_directory, "refang.txt"))
if( Blockdata["myid"] == Blockdata["main_node"]): write_text_row(Tracker["refang"], os.path.join(Tracker["directory"], "refang.txt"))
Tracker["rshifts"] = read_text_row(os.path.join(old_refinement_iter_directory, "rshifts.txt"))
if( Blockdata["myid"] == Blockdata["main_node"]): write_text_row(Tracker["refang"], os.path.join(Tracker["directory"], "rshifts.txt"))
my_last_params = read_text_file(os.path.join(old_refinement_previous_iter_directory, "params_%03d.txt"%(selected_iteration-1)), -1)
my_parstack = read_text_file(os.path.join(Tracker["constants"]["masterdir"], "refinement_parameters.txt"), -1)
if( Blockdata["myid"] == Blockdata["main_node"]):
my_parstack[3:5]= my_last_params[3:5]
write_text_file(my_parstack, os.path.join(Tracker["constants"]["masterdir"], "previous_refinement_parameters.txt"))
Tracker["previous_parstack"] = os.path.join(Tracker["constants"]["masterdir"], "previous_refinement_parameters.txt")
nproc_previous = 0
procid = 0
old_refinement_iter_dir = os.path.join(Tracker["constants"]["refinement_dir"], "main%03d"%selected_iteration)
if Blockdata["myid"] == Blockdata["main_node"]:
while os.path.exists(os.path.join(old_refinement_iter_dir,"oldparamstructure","oldparamstructure_%01d_%03d_%03d.json"%(procid, nproc_previous, selected_iteration))):
nproc_previous += 1
nproc_previous = bcast_number_to_all(nproc_previous, Blockdata["main_node"], MPI_COMM_WORLD)
Blockdata["nproc_previous"] = nproc_previous
oldparamstructure =[[], []]
local_dict = {}
for procid in xrange(2):
if( Blockdata["myid"] == Blockdata["main_node"]): lcore = read_text_file(os.path.join(Tracker["constants"]["masterdir"], "chunk_%d.txt"%procid))
else: lcore = 0
lcore = wrap_mpi_bcast(lcore, Blockdata["main_node"], MPI_COMM_WORLD)
psize = len(lcore)
oldparamstructure[procid] = []
im_start, im_end = MPI_start_end(psize, Blockdata["nproc"], Blockdata["myid"])
local_lcore = lcore[im_start:im_end]
istart_old_proc_id = -1
iend_old_proc_id = -1
plist = []
for iproc_old in xrange(nproc_previous):
im_start_old, im_end_old = MPI_start_end(psize, nproc_previous, iproc_old)
if (im_start>= im_start_old) and im_start <=im_end_old: istart_old_proc_id = iproc_old
if (im_end>= im_start_old) and im_end <=im_end_old: iend_old_proc_id = iproc_old
plist.append([im_start_old, im_end_old])
ptl_on_this_cpu = im_start
nptl_total = 0
for iproc_index_old in xrange(istart_old_proc_id, iend_old_proc_id+1):
fout = open(os.path.join(Tracker["constants"]["refinement_dir"],"main%03d"%selected_iteration, "oldparamstructure", "oldparamstructure_%01d_%03d_%03d.json"%(procid, \
iproc_index_old, selected_iteration)),'r')
oldparamstructure_on_old_cpu = convert_json_fromunicode(json.load(fout))
fout.close()
mlocal_id_on_old = ptl_on_this_cpu - plist[iproc_index_old][0]
while (mlocal_id_on_old>>>
def downsize_data_for_sorting(original_data, return_real = False, preshift = True, npad = 1):
	# Shrink already-loaded particle images to Tracker["nxinit"] and return:
	#   1. cdata: data for image comparison, always in Fourier format
	#      (optionally masked / focus-projection filtered)
	#   2. rdata: data for reconstruction, 4nn return real image
	global Tracker, Blockdata
	from fundamentals import resample, fshift, cyclic_shift
	from filter import filt_ctf
	from applications import MPI_start_end
	from EMAN2 import Region
	# NOTE(review): fft, fdecimate, filt_table, model_gauss_noise, cosinemask,
	# get_params_proj etc. are used below without a local import; presumably
	# provided by module-level imports -- confirm.
	line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
	if( Blockdata["myid"] == Blockdata["main_node"]): print(line,"--->>>downsize_data_for_sorting starts<<<---- ")
	mask2D = model_circle(Tracker["constants"]["radius"],Tracker["constants"]["nnxo"],Tracker["constants"]["nnxo"])
	shrinkage = Tracker["nxinit"]/float(Tracker["constants"]["nnxo"])
	if Tracker["applybckgnoise"]:
		# per-group 1/sqrt(noise power) tables for colouring substituted noise
		oneover = []
		nnx = len(Blockdata["bckgnoise"][0])
		for i in xrange(len(Blockdata["bckgnoise"])):
			temp = [0.0]*nnx
			for k in xrange(nnx):
				if(Blockdata["bckgnoise"][i][k] > 0.0): temp[k] = 1.0/sqrt(Blockdata["bckgnoise"][i][k])
			oneover.append(temp)
		del temp
	if Tracker["focus3D"]: # focus mask is applied
		if Blockdata["myid"] == Blockdata["main_node"]:
			focus3d = get_im(Tracker["focus3D"])
			focus3d_nx = focus3d.get_xsize()
			if focus3d_nx != Tracker["nxinit"]: # So the decimated focus volume can be directly used
				focus3d = resample(focus3d, float(Tracker["nxinit"])/float(focus3d_nx))
		else: focus3d = model_blank(Tracker["nxinit"], Tracker["nxinit"], Tracker["nxinit"])
		bcast_EMData_to_all(focus3d, Blockdata["myid"], Blockdata["main_node"])
		focus3d = prep_vol(focus3d, 1, 1)
	# Preprocess the data
	nima = len(original_data)
	cdata = [None]*nima
	rdata = [None]*nima
	for im in xrange(nima):
		image = original_data[im].copy()
		chunk_id = image.get_attr("chunk_id")
		# BUGFIX: removed dead `try: group_id = image.set_attr("group", groupids[im])
		# except: pass` -- `groupids` is undefined in this function, so the call
		# always raised NameError which the bare except silently discarded.
		particle_group_id = image.get_attr("particle_group")
		phi,theta,psi,s2x,s2y = get_params_proj(image, xform = "xform.projection")
		[sx, sy] = image.get_attr("previous_shifts")
		[sx1, sy1] = image.get_attr("current_shifts")
		# rimage (reconstruction): integer cyclic shift; cimage (comparison): subpixel shift
		rimage = cyclic_shift(image, int(round(sx)), int(round(sy)))
		cimage = fshift(image, sx1, sy1)
		# normalize both to zero mean / unit sigma within the particle-radius mask
		st = Util.infomask(rimage, mask2D, False)
		rimage -= st[0]
		rimage /= st[1]
		st = Util.infomask(cimage, mask2D, False)
		cimage -= st[0]
		cimage /= st[1]
		if Tracker["applybckgnoise"]:
			if Tracker["applymask"]:
				if Tracker["constants"]["hardmask"]: cimage = cosinemask(cimage, radius = Tracker["constants"]["radius"])
				else:
					bckg = model_gauss_noise(1.0,Tracker["constants"]["nnxo"]+2,Tracker["constants"]["nnxo"])
					bckg.set_attr("is_complex",1)
					bckg.set_attr("is_fftpad",1)
					bckg = fft(filt_table(bckg, oneover[particle_group_id]))
					# Normalize bckg noise in real space, only region actually used.
					st = Util.infomask(bckg, mask2D, False)
					bckg -= st[0]
					bckg /= st[1]
					cimage = cosinemask(cimage,radius = Tracker["constants"]["radius"], bckg = bckg)
		else:
			if Tracker["applymask"]:cimage = cosinemask(cimage, radius = Tracker["constants"]["radius"])
			else: pass
		# FT
		rimage = fft(rimage)
		cimage = fft(cimage)
		if Tracker["constants"]["CTF"] :
			ctf_params = rimage.get_attr("ctf")
			rimage = fdecimate(rimage, Tracker["nxinit"]*npad, Tracker["nxinit"]*npad, 1, False, False)
			cimage = fdecimate(cimage, Tracker["nxinit"]*npad, Tracker["nxinit"]*npad, 1, False, False)
			ctf_params.apix = ctf_params.apix/shrinkage
			rimage.set_attr('ctf', ctf_params)
			cimage.set_attr('ctf', ctf_params)
			rimage.set_attr('ctf_applied', 0)
			cimage.set_attr('ctf_applied', 0)
			if return_real : rimage = fft(rimage)
		else:
			ctf_params = rimage.get_attr_default("ctf", False)
			if ctf_params:
				ctf_params.apix = ctf_params.apix/shrinkage
				rimage.set_attr('ctf', ctf_params)
				rimage.set_attr('ctf_applied', 0)
				cimage.set_attr('ctf', ctf_params)
				cimage.set_attr('ctf_applied', 0)
			# BUGFIX: was bare `nxinit` (undefined in this function -> NameError
			# whenever CTF is off); the CTF branch above uses Tracker["nxinit"].
			rimage = fdecimate(rimage, Tracker["nxinit"]*npad, Tracker["nxinit"]*npad, 1, True, False)
			cimage = fdecimate(cimage, Tracker["nxinit"]*npad, Tracker["nxinit"]*npad, 1, True, False)
			apix = Tracker["constants"]["pixel_size"]
			rimage.set_attr('apix', apix/shrinkage)
			cimage.set_attr('apix', apix/shrinkage)
		# cimage always stays a padded Fourier image
		cimage.set_attr("padffted",1)
		cimage.set_attr("npad", npad)
		if not return_real:
			rimage.set_attr("padffted",1)
			rimage.set_attr("npad", npad)
		set_params_proj(rimage,[phi, theta, psi, 0.0, 0.0])
		rimage.set_attr("chunk_id", chunk_id)
		#image.set_attr("group", groupids[im])
		rimage.set_attr("particle_group", particle_group_id)
		set_params_proj(cimage,[phi, theta, psi, 0.0, 0.0])
		cimage.set_attr("chunk_id", chunk_id)
		#cimage.set_attr("group", groupids[im])
		cimage.set_attr("particle_group", particle_group_id)
		rdata[im] = rimage
		cdata[im] = cimage
		if Tracker["applybckgnoise"]:
			rdata[im].set_attr("bckgnoise", Blockdata["bckgnoise"][particle_group_id])
			if Tracker["constants"]["comparison_method"] == "cross":Util.mulclreal(cdata[im], Blockdata["unrolldata"][particle_group_id])
		else:
			rdata[im].set_attr("bckgnoise", Blockdata["bckgnoise"])
			cdata[im].set_attr("bckgnoise", Blockdata["bckgnoise"])
		if Tracker["focus3D"]:
			# multiply the comparison image by the binarized focus projection
			cdata[im] = fft(binarize(prgl(focus3d, [phi, theta, psi, 0.0, 0.0], 1, True), 1)*fft(cdata[im]))
			if Tracker["constants"]["CTF"]: cdata[im].set_attr("ctf", rdata[im].get_attr("ctf"))
		cdata[im].set_attr("is_complex",0)
	return cdata, rdata
##<<<----for 3D----->>>>
def downsize_data_for_rec3D(original_data, particle_size, return_real = False, npad = 1):
	# Shrink (Fourier-decimate) a list of already-loaded particle images to
	# particle_size for 3-D reconstruction, rescaling ctf/apix and carrying
	# over chunk/particle_group attributes and projection parameters.
	# Returns rdata: the downsized images (Fourier format unless return_real).
	# (Removed the unused local `radius`; it was computed but never read.)
	global Tracker, Blockdata
	from fundamentals import resample, fshift
	from filter import filt_ctf
	from applications import MPI_start_end
	from EMAN2 import Region
	# NOTE(review): cyclic_shift, fft, fdecimate, get_params_proj, model_circle,
	# set_params_proj are used below but not imported locally; presumably
	# provided by module-level imports -- confirm.
	line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
	if( Blockdata["myid"] == Blockdata["main_node"]): print(line, "--->>>downsize_data_for_rec3D<<<---- ")
	nima = len(original_data)
	rdata = [None]*nima
	mask2D = model_circle(Tracker["constants"]["radius"],Tracker["constants"]["nnxo"],Tracker["constants"]["nnxo"])
	shrinkage = particle_size/float(Tracker["constants"]["nnxo"])
	for im in xrange(nima):
		image = original_data[im].copy()
		chunk_id = image.get_attr("chunk_id")
		# particle_group may be absent on older stacks; default to -1 then
		try: particle_group_id = image.get_attr("particle_group")
		except: particle_group_id = -1
		phi,theta,psi,s2x,s2y = get_params_proj(image, xform = "xform.projection")
		[sx, sy] = image.get_attr("previous_shifts") # always for rec3D
		if Tracker["nosmearing"]: image = fshift(image, s2x, s2y)
		else: image = cyclic_shift(image, int(round(sx)), int(round(sy)))
		# normalize to zero mean / unit sigma within the particle-radius mask
		st = Util.infomask(image, mask2D, False)
		image -= st[0]
		image /= st[1]
		image = fft(image)
		if Tracker["constants"]["CTF"]:
			# decimate in Fourier space; rescale the ctf pixel size accordingly
			ctf_params = image.get_attr("ctf")
			image = fdecimate(image, particle_size*npad, particle_size*npad, 1, False, False)
			ctf_params.apix = ctf_params.apix/shrinkage
			image.set_attr('ctf', ctf_params)
			image.set_attr('ctf_applied', 0)
		else:
			ctf_params = image.get_attr_default("ctf", False)
			if ctf_params:
				ctf_params.apix = ctf_params.apix/shrinkage
				image.set_attr('ctf', ctf_params)
				image.set_attr('ctf_applied', 0)
			image = fdecimate(image, particle_size*npad, particle_size*npad, 1, True, False)
			apix = Tracker["constants"]["pixel_size"]
			image.set_attr('apix', apix/shrinkage)
		if not return_real:
			image.set_attr("padffted",1)
			image.set_attr("npad", npad)
		image.set_attr("chunk_id", chunk_id)
		image.set_attr("particle_group", particle_group_id)
		set_params_proj(image,[phi, theta, psi, 0.0, 0.0])
		rdata[im] = image
		if Tracker["applybckgnoise"]: rdata[im].set_attr("bckgnoise", Blockdata["bckgnoise"][rdata[im].get_attr("particle_group")])
		else: rdata[im].set_attr("bckgnoise", Blockdata["bckgnoise"])
	return rdata
### end of downsize
###<<<--- comparison
def compare_two_images_eucd(data, ref_vol):
	# For every image, project ref_vol at the image's angles and score the
	# negative squared Euclidean distance (CTF-weighted), normalized by nnxo^2.
	global Tracker, Blockdata
	from filter import filt_tophatl
	from math import sqrt
	ny = data[0].get_ysize()
	volft = prep_vol(ref_vol, npad = 2, interpolation_method = 1)
	ctf_imgs = [ctf_img_real(ny, img.get_attr('ctf')) for img in data]
	norm_qt = float(Tracker["constants"]["nnxo"]*Tracker["constants"]["nnxo"])
	peaks = [None]*len(data)
	for idx, img in enumerate(data):
		phi, theta, psi, s2x, s2y = get_params_proj(img, xform = "xform.projection")
		proj = prgl(volft, [phi, theta, psi, 0.0, 0.0], 1, False)
		proj.set_attr("is_complex", 0)
		if img.get_attr("is_complex") == 1:
			img.set_attr("is_complex", 0)
		if Tracker["applybckgnoise"]:
			weights = Blockdata["unrolldata"][img.get_attr("particle_group")]
		else:
			weights = Blockdata["unrolldata"]
		peaks[idx] = -Util.sqed(img, proj, ctf_imgs[idx], weights)/norm_qt
	return peaks
#
def compare_two_images_cross(data, ref_vol):
	# Cross-correlation comparison: project ref_vol at each image's angles,
	# CTF-modulate the projection, and score the inner product with the image
	# normalized by the projection norm (optionally noise-weighted).
	global Tracker, Blockdata
	from filter import filt_tophatl
	from math import sqrt
	ny = data[0].get_ysize()
	volft = prep_vol(ref_vol, 2, 1)
	ctf_imgs = [ctf_img_real(ny, img.get_attr('ctf')) for img in data]
	peaks = [None]*len(data)
	# Ref is in reciprocal space
	for idx, img in enumerate(data):
		phi, theta, psi, s2x, s2y = get_params_proj(img, xform = "xform.projection")
		ref = prgl(volft, [phi, theta, psi, 0.0, 0.0], 1, False)
		Util.mulclreal(ref, ctf_imgs[idx])
		ref.set_attr("is_complex", 0)
		ref.set_value_at(0,0,0.0)
		nrmref = sqrt(Util.innerproduct(ref, ref, None))
		if img.get_attr("is_complex") == 1:
			img.set_attr("is_complex", 0)
		if Tracker["focus3D"]:
			peaks[idx] = Util.innerproduct(ref, img, None)/nrmref
		else:
			if Tracker["applybckgnoise"]:
				peak = Util.innerproduct(ref, img, Blockdata["unrolldata"][img.get_attr("particle_group")])
			else:
				peak = Util.innerproduct(ref, img, None)
			peaks[idx] = peak/nrmref
	return peaks
###<<<---various utilities
def clusters_to_plist(clusters, pall):
	"""
	Map per-cluster local indices to original particle IDs.

	clusters: list of clusters, each holding indices into pall
	          (plain lists and numpy arrays are both accepted)
	pall:     list of original particle IDs

	Returns:
	  [assignment, plist]: group id per particle, and the sorted particle IDs
	  qlist:               [[group_id, particle_id], ...] in plist order
	  unaccounted:         IDs from pall absent from every cluster (sorted
	                       for deterministic output; the original returned an
	                       unordered set difference)
	"""
	pdict = {}
	plist = []
	for igrp, cluster in enumerate(clusters):
		# BUGFIX: the original called clusters[igrp].tolist() and discarded the
		# result; convert properly here (no-op for plain lists).
		members = cluster.tolist() if hasattr(cluster, "tolist") else cluster
		for a in members:
			pdict[pall[a]] = igrp
			plist.append(pall[a])
	plist = sorted(plist)
	assignment = [pdict[p] for p in plist]
	qlist = [[pdict[p], p] for p in plist]
	unaccounted = sorted(set(pall).difference(plist))
	return [assignment, plist], qlist, unaccounted
def create_nrandom_lists(partids):
	# Create Tracker["constants"]["indep_runs"] independent random group
	# assignments for the particles listed in partids.
	# the second column denotes orignal particle IDs
	# the first column is randomized group ID
	# Assignments are written to independent_index_%03d.txt and broadcast via
	# Tracker["random_assignment"]; returns None.
	global Tracker, Blockdata
	import copy
	import random
	# BUGFIX: write_text_row is called below but was not in the local import
	# list (only write_text_file was), risking a NameError.
	from utilities import wrap_mpi_bcast, read_text_file, write_text_file, write_text_row
	if Blockdata["myid"] == Blockdata["main_node"]:
		Tracker["random_assignment"] = []
		data_list = read_text_file(partids, -1)
		# one column -> ids only; two columns -> ids are in the second column
		if len(data_list)==1: Tracker["sorting_data_list"]= data_list[0]
		else: Tracker["sorting_data_list"]= data_list[1]
		# seed of -1 means: seed from system time
		if Tracker["constants"]["seed"] == -1: random.seed()
		else: random.seed(Tracker["constants"]["seed"])
		Tracker["indep_runs_list"] = []
		group_size = len(Tracker["sorting_data_list"])//Tracker["number_of_groups"]
		for index_of_random in xrange(Tracker["constants"]["indep_runs"]):
			particle_dict = {}
			ll = copy.deepcopy(Tracker["sorting_data_list"])
			random.shuffle(ll)
			group_list = []
			for index_of_groups in xrange(Tracker["number_of_groups"]):
				if index_of_groups != Tracker["number_of_groups"]-1:
					for iparticle in ll[index_of_groups*group_size:(index_of_groups+1)*group_size]:
						particle_dict[iparticle] = index_of_groups
						group_list.append(index_of_groups)
				else:
					# last group absorbs the remainder particles
					for iparticle in ll[index_of_groups*group_size:]:
						particle_dict[iparticle] = index_of_groups
						group_list.append(index_of_groups)
			assignment = []
			for im in xrange(len(Tracker["sorting_data_list"])):
				assignment.append([particle_dict[Tracker["sorting_data_list"][im]], Tracker["sorting_data_list"][im]])
			write_text_row(assignment, os.path.join(Tracker["directory"],"independent_index_%03d.txt"%index_of_random))
			Tracker["random_assignment"].append(assignment)
			del assignment
			del ll
	else:
		Tracker["indep_runs_list"] = 0
		Tracker["sorting_data_list"] = 0
		Tracker["random_assignment"] = 0
	Tracker["random_assignment"] = wrap_mpi_bcast(Tracker["random_assignment"], Blockdata["main_node"])
	Tracker = wrap_mpi_bcast(Tracker, Blockdata["main_node"])
	return
def compute_pairwise_agreement_ratio(plist, number_of_groups):
	# For every pair of partitions in plist, match their clusters and record the
	# percentage of objects that fall into stable (agreeing) clusters.
	# Returns {pair_index: [i, j, agreement_percentage]}.
	partitions = []
	for assignment in plist:
		group_ids = [entry[0] for entry in assignment]
		partitions.append(convertasi(group_ids, number_of_groups))
	res_dict = {}
	pair_count = 0
	for i in xrange(len(plist) - 1):
		for j in xrange(i + 1, len(plist)):
			newindeces, list_stable, nb_tot_objs = k_means_match_clusters_asg_new(partitions[i], partitions[j])
			stable_total = sum(len(grp) for grp in list_stable)
			ratio_accounted = float(stable_total)/float(len(plist[i]))*100.
			res_dict[pair_count] = [i, j, ratio_accounted]
			pair_count += 1
	return res_dict
def resize_groups_from_stable_members_mpi(Accounted_on_disk, Unaccounted_on_disk):
global Tracker, Blockdata
import random
if Blockdata["myid"] == Blockdata["main_node"]:
ptl_dict = {}
accounted = read_text_file(Accounted_on_disk, -1)
number_of_groups = max(accounted[0]) + 1
groups = [[] for igrp in xrange(number_of_groups)]
unaccounted = read_text_file(Unaccounted_on_disk, -1)
for im in xrange(len(accounted[0])):
groups[accounted[0][im]].append(accounted[1][im])
ptl_dict[accounted[1][im]] = accounted[0][im]
accounted_members = sorted(accounted[1])
unaccounted = sorted(unaccounted[0])
full_list = accounted_members + unaccounted
full_list = sorted(full_list)
total_stack = len(full_list)
Tracker["number_of_groups"] = number_of_groups
group_size = int(float(total_stack)/number_of_groups)
else:
number_of_groups = 0
total_stack = 0
number_of_groups = bcast_number_to_all(number_of_groups, Blockdata["main_node"], MPI_COMM_WORLD)
total_stack = bcast_number_to_all(total_stack, Blockdata["main_node"], MPI_COMM_WORLD)
Tracker["total_stack"] = total_stack
Tracker["number_of_groups"] = number_of_groups
###-----
if Blockdata["myid"] == Blockdata["main_node"]:
assignment_list = []
for indep in xrange(1):
print("after iter %d"%indep)
unaccounted_members = copy.deepcopy(unaccounted)
new_groups = copy.deepcopy(groups)
for im in xrange(len(unaccounted_members)):
igroup = random.randrange(0, number_of_groups)
new_groups[igroup].append(unaccounted_members[im])
ptl_dict[unaccounted_members[im]] = igroup
"""
shuffle(unaccounted_members)
l1 = 0
while l1 0:new_list.append(sorted(any))
accounted_list, new_index = merge_classes_into_partition_list(new_list)
a = set(range(total_stack))
b = set(accounted_list)
unaccounted_list = sorted(list(a.difference(b)))
return accounted_list, unaccounted_list, new_index, new_list
#####
def patch_to_do_k_means_match_clusters_asg_new(ptp1, ptp2):
    """
    Patch ad-hoc single-element classes onto the shorter of two partitions so
    both have the same number of classes; two-way comparison then becomes
    feasible.  The fake element ids used for padding are returned so callers
    can strip them from the matching result.

    Returns (newindeces, list_stable, nb_tot_objs, patch_elements).
    """
    from statistics import k_means_match_clusters_asg_new
    patch_elements = []
    if len(ptp1) != len(ptp2):
        alist = [max(a) for a in ptp1 if len(a) > 0]
        blist = [max(b) for b in ptp2 if len(b) > 0]
        if len(alist) > 0 and len(blist) > 0:
            max_number = max(max(alist), max(blist))
        else: exit() # This would never happen
        # Pad the shorter partition with fresh ids above every real element.
        shorter = ptp2 if len(ptp1) > len(ptp2) else ptp1
        for indiff in range(abs(len(ptp1) - len(ptp2))):
            fake_id = max_number + indiff + 1
            patch_elements.append(fake_id)
            shorter.append(array([fake_id], "int32"))
    newindeces, list_stable, nb_tot_objs = k_means_match_clusters_asg_new(ptp1, ptp2)
    return newindeces, list_stable, nb_tot_objs, patch_elements
def split_partition_into_clusters(sorting_res):
    """
    Split partitions of [group_id, particle_id] pairs into per-group particle lists.

    sorting_res : list of partitions; each partition is a list of entries
                  whose element 0 is the group id and element 1 the particle id
    Returns a flat list of clusters; within each partition, clusters appear in
    order of first appearance of their group id, members in original order.

    Rewritten from an O(n^2) per-partition re-scan to a single pass; the unused
    locals id_list/ptp/final_class_id of the original were dropped and the
    py2-only xrange replaced by range (valid on both Python 2 and 3).
    """
    clusters = []
    for one_partition in sorting_res:
        order = []    # group ids in order of first appearance
        members = {}  # group id -> particle ids, original order preserved
        for entry in one_partition:
            group_id = entry[0]
            if group_id not in members:
                members[group_id] = []
                order.append(group_id)
            members[group_id].append(entry[1])
        for group_id in order:
            clusters.append(members[group_id])
    return clusters
def split_partition_into_ordered_clusters(partition):
    """
    Split a [group_id, particle_id] partition into clusters, drop clusters
    smaller than Tracker["constants"]["minimum_grp_size"], and re-index the
    survivors with contiguous new group ids.

    Returns (clusters, new_partition) where new_partition keeps only particles
    of surviving clusters, stamped with their new group id.
    """
    global Tracker, Blockdata
    # Count the distinct group ids that occur in the partition.
    seen_ids = []
    for entry in partition:
        if entry[0] not in seen_ids:
            seen_ids.append(entry[0])
    # NOTE(review): the scan below matches entries against the index
    # 0..len(seen_ids)-1, i.e. it assumes group ids are contiguous and
    # zero-based — confirm that upstream producers guarantee this.
    clusters = []
    member_to_old = {}   # particle id -> original group id
    old_to_new = {}      # original group id -> new group id (or -1 if dropped)
    next_new_id = 0
    for old_id in range(len(seen_ids)):
        one_cluster = []
        for entry in partition:
            if entry[0] == old_id:
                one_cluster.append(entry[1])
                member_to_old[entry[1]] = old_id
        if len(one_cluster) >= Tracker["constants"]["minimum_grp_size"]:
            clusters.append(one_cluster)
            old_to_new[old_id] = next_new_id
            next_new_id += 1
        else:
            old_to_new[old_id] = -1  # cluster too small: cleaned away
    new_partition = []
    for entry in partition:
        new_id = old_to_new[member_to_old[entry[1]]]
        if new_id > -1:
            new_partition.append([new_id, entry[1]])
    return clusters, new_partition
def prep_ptp_single(all_lists, full_list):
    """
    Convert accounted classes plus unaccounted particles into a single ptp
    (per-group index arrays) aligned to full_list.

    all_lists : list of classes, each a list of particle ids
    full_list : the initial input particle ids; the assignment is aligned to it
    Particles of full_list not present in any class are stamped with an ad-hoc
    group id (len(all_lists)+1).
    Returns the ptp produced by convertasi.
    """
    ad_hoc_group_ID = len(all_lists)+1
    ad_hoc_particle_exists = False
    # BUG FIX: the original called a.union(b) and discarded the result, so the
    # accounted set stayed empty and the mismatch check below could never
    # trigger.  Accumulate in place instead.
    a = set()
    for b in all_lists: a.update(b)
    c = set(full_list)
    if list(a.difference(c)) != []: ERROR("Accounted and unaccounted in total do not match the total number of particles", "prep_ptp_single", 1, Blockdata["myid"])
    else:
        pdict = {}
        for iclass in range(len(all_lists)):
            for iptl in range(len(all_lists[iclass])): pdict[all_lists[iclass][iptl]] = iclass
        assignment = []
        for im in range(len(full_list)):
            try: group_ID = pdict[full_list[im]]
            except KeyError:
                # particle not in any class -> ad-hoc group
                group_ID = ad_hoc_group_ID
                ad_hoc_particle_exists = True
            assignment.append(group_ID)
        # NOTE(review): convertasi only materializes groups 0..n-1, so the
        # ad-hoc id len(all_lists)+1 itself never gets its own array — confirm
        # this is the intended behavior of the comparison that consumes ptp.
        if ad_hoc_particle_exists: ptp = convertasi(assignment, ad_hoc_group_ID)
        else: ptp = convertasi(assignment, len(all_lists)+1)
        del pdict
        return ptp
def merge_original_id_lists(original_id_lists):
    """
    Merge several particle lists carrying original IDs into one list stamped
    with new group ids.

    original_id_lists : list of lists of [group_id, original_id] entries
    Returns (final_list, number_of_clusters) where final_list is sorted by
    original id and each element is [new_group_id, original_id].

    Rewritten with setdefault/enumerate instead of the try/except grouping and
    py2-only xrange of the original (range is valid on Python 2 and 3).
    """
    clusters = []
    all_id_list = []
    for one_list in original_id_lists:
        per_group = {}
        for entry in one_list:
            per_group.setdefault(entry[0], []).append(entry[1])
            all_id_list.append(entry[1])
        # one cluster per group id found in this list
        clusters.extend(per_group.values())
    all_id_list.sort()
    id_to_cluster = {}
    for cluster_index, cluster in enumerate(clusters):
        for particle_id in cluster:
            id_to_cluster[particle_id] = cluster_index
    final_list = [[id_to_cluster[particle_id], particle_id] for particle_id in all_id_list]
    return final_list, len(clusters)
def merge_classes_into_partition_list(classes_list):
    """
    Flatten a list of classes into a sorted particle list plus a partition.

    classes_list : list of classes, each a list of particle ids
    Returns (data_list, new_index): data_list is every particle id sorted
    ascending; new_index pairs each sorted id with the index of the class it
    belongs to ([class_index, particle_id]).

    Rewritten with enumerate and a comprehension; the py2-only xrange loops of
    the original were replaced (range works on Python 2 and 3).
    """
    group_dict = {}
    data_list = []
    for class_index, one_class in enumerate(classes_list):
        for particle_id in one_class:
            data_list.append(particle_id)
            group_dict[particle_id] = class_index
    data_list.sort()
    new_index = [[group_dict[particle_id], particle_id] for particle_id in data_list]
    del group_dict
    return data_list, new_index
def get_sorting_all_params(data):
    """
    Gather the per-particle sorting attributes (group id + projection
    parameters) from all MPI processes into one list ordered by global
    particle index, then broadcast it to every process.

    data : local particle images of this process.
    Returns the complete attribute list (identical on all ranks on return).
    """
    global Tracker, Blockdata
    from utilities import wrap_mpi_bcast
    from applications import MPI_start_end
    # NOTE: [[]]*N shares one inner list across all slots, but every slot is
    # overwritten (not mutated) by fill_in_mpi_list below, so this is safe.
    if Blockdata["myid"] == Blockdata["main_node"]: total_attr_value_list = [[]]*Tracker["total_stack"]
    else: total_attr_value_list = 0
    # Round-robin gather: each process in turn broadcasts its local slice and
    # the main node pastes it into the global list.  wrap_mpi_bcast and
    # mpi_barrier are collective calls — all ranks must execute every pass.
    for myproc in xrange(Blockdata["nproc"]):
        attr_value_list = 0
        if Blockdata["myid"] == myproc: attr_value_list = get_sorting_attr_stack(data)
        attr_value_list = wrap_mpi_bcast(attr_value_list, myproc)
        if Blockdata["myid"] == Blockdata["main_node"]:
            image_start,image_end = MPI_start_end(Tracker["total_stack"], Blockdata["nproc"], myproc)
            total_attr_value_list = fill_in_mpi_list(total_attr_value_list, attr_value_list, image_start,image_end)
        mpi_barrier(MPI_COMM_WORLD)
    # final broadcast of the assembled list to everybody
    total_attr_value_list = wrap_mpi_bcast(total_attr_value_list, Blockdata["main_node"])
    return total_attr_value_list
def get_sorting_attr_stack(data_in_core):
    """
    Collect, for every local image, its partitioned group id and its
    xform.projection alignment parameters.

    Returns a list of [group_id, projection_params] entries, one per image.
    """
    from utilities import get_params_proj
    return [[image.get_attr("group"), get_params_proj(image, xform = "xform.projection")]
            for image in data_in_core]
def fill_in_mpi_list(mpi_list, data_list, index_start, index_end):
    """
    Paste data_list into mpi_list[index_start:index_end] in place.

    data_list must hold at least index_end - index_start elements (an
    IndexError propagates otherwise, as in the original).
    Returns mpi_list (the same object, for chaining).

    Only change: py2-only xrange replaced by range (valid on Python 2 and 3).
    """
    for index in range(index_start, index_end): mpi_list[index] = data_list[index - index_start]
    return mpi_list
def parsing_sorting_params(partid, sorting_params_list):
    """
    Split combined sorting output into group assignments and alignment params.

    partid              : path to a 1- or 2-column particle-id text file
                          (ids in column 0, or column 1 when two columns)
    sorting_params_list : rows whose element 0 is the group id and the rest
                          are ali3d parameters
    Returns (group_list, ali3d_params_list); group_list pairs each group id
    with the particle id read from the file.
    """
    from utilities import read_text_file
    group_list = []
    ali3d_params_list = []
    columns = read_text_file(partid, -1)
    if len(columns) in (1, 2):
        particle_ids = columns[0] if len(columns) == 1 else columns[1]
        for row_index, row in enumerate(sorting_params_list):
            group_list.append([row[0], particle_ids[row_index]])
            ali3d_params_list.append(row[1:])
    else:
        ERROR("Wrong columns", "parsing_sorting_params", 1, 0)
    return group_list, ali3d_params_list
def convertasi(asig, number_of_groups):
    """
    Convert an assignment vector into per-group index arrays.

    asig             : sequence of group ids, one per particle index
    number_of_groups : number of groups to materialize (ids outside
                       0..number_of_groups-1 are silently ignored, as before)
    Returns a list of number_of_groups sorted numpy int32 arrays, where entry
    k holds the particle indices assigned to group k.

    Rewritten from an O(n*k) scan per group to a single O(n+k) pass; indices
    are appended in ascending order so the result is already sorted.
    """
    from numpy import array
    groups = [[] for _ in range(number_of_groups)]
    for index, group_id in enumerate(asig):
        if 0 <= group_id < number_of_groups:
            groups[group_id].append(index)
    return [array(members, "int32") for members in groups]
def extract_groups_from_partitions(partition_list, number_of_groups):
    """
    Build per-group index arrays (ptp) for each of several partitions.

    partition_list   : list of partitions, each a list of [group_id, orig_id]
    number_of_groups : group count passed through to convertasi
    Returns (ptp, org_id): ptp[i] is convertasi of partition i's group column;
    org_id is the sorted original-id column of the first partition.
    """
    ptp = []
    for one_partition in partition_list:
        assignment = [entry[0] for entry in one_partition]
        ptp.append(convertasi(assignment, number_of_groups))
    # original ids are taken from the first partition only
    org_id = sorted(entry[1] for entry in partition_list[0])
    return ptp, org_id
def get_res(res_curve):
    """
    Locate the FSC cutoff indices of a resolution curve.

    res_curve : FSC values indexed by frequency shell
    Returns (res_05, res_143): the index recorded just before the curve first
    drops below 0.5 and 0.143 respectively (the original ifreq-1 convention is
    preserved), or 0 when the curve drops immediately.

    BUG FIX: the original initialized unused fsc05/fsc143 but left
    res_05/res_143 unbound, raising NameError whenever the curve fell below a
    threshold at the very first sample; they are now initialized to 0.
    """
    res_05 = 0
    res_143 = 0
    for ifreq in range(1, len(res_curve)):
        if res_curve[ifreq] < 0.5: break
        res_05 = ifreq - 1
    for ifreq in range(1, len(res_curve)):
        if res_curve[ifreq] < 0.143: break
        res_143 = ifreq - 1
    return res_05, res_143
def extract_clusters_from_partition(partition_to_be_saved, number_of_cluster):
    """
    Regroup a [cluster_id, original_id] partition into per-cluster id lists.

    partition_to_be_saved : list of [cluster_id, original_id] pairs
    number_of_cluster     : number of clusters to materialize
    Returns a list of number_of_cluster sorted lists of original ids.

    Rewritten with direct unpacking and comprehensions; py2-only xrange
    replaced by range (valid on Python 2 and 3).
    """
    clusters = [[] for _ in range(number_of_cluster)]
    for cluster_ID, original_ID in partition_to_be_saved:
        clusters[cluster_ID].append(original_ID)
    return [sorted(cluster) for cluster in clusters]
def update_data_partition(cdata, rdata, partids):
    """
    Read a particle-assignment text file and stamp the "group" attribute on
    the local slices of cdata and rdata (partition update of an independent
    EQKmeans run).

    partids : path to a 1- or 2-column text file; one column means particle
              ids only (group set to -1), two columns means [group, particle].
    Returns the full, unsliced assignment list (same on every process).
    """
    global Tracker, Blockdata
    from utilities import wrap_mpi_bcast
    import copy
    if( Blockdata["myid"] == Blockdata["main_node"]):
        lpartids = read_text_file(partids, -1)
        if len(lpartids) == 1:
            # single column: particle ids only, no group assignment yet
            lpartids = lpartids[0]
            groupids = len(lpartids)*[-1]
        else:
            # two columns: column 0 is the group id, column 1 the particle id
            groupids = lpartids[0]
            lpartids = lpartids[1]
    else:
        lpartids = 0
        groupids = 0
    # collective broadcasts: every rank must reach these calls
    lpartids = wrap_mpi_bcast(lpartids, Blockdata["main_node"])
    groupids = wrap_mpi_bcast(groupids, Blockdata["main_node"])
    assignment = copy.copy(groupids)
    # sanity check: the file must describe exactly the tracked particle count
    try: assert(Tracker["total_stack"] == len(groupids))
    except: ERROR("total stack in Tracker does not agree with the one is just read in", "update_data_partition", 1, Blockdata["myid"])
    image_start, image_end = MPI_start_end(Tracker["total_stack"], Blockdata["nproc"], Blockdata["myid"])
    nima = image_end - image_start
    assert(nima == len(cdata))
    # keep only this rank's slice and stamp both image stacks
    groupids = groupids[image_start:image_end]
    for im in xrange(nima):
        cdata[im].set_attr("group",groupids[im])
        rdata[im].set_attr("group",groupids[im])
    return assignment
def partition_data_into_orientation_groups_nompi(refa_vecs, data_vecs):
    """
    Assign each data direction vector to the reference direction with the
    largest dot product (computed by get_dist1).

    Returns a list parallel to data_vecs holding the winning reference index
    (None if refa_vecs is empty).
    """
    orien_assignment = [None]*len(data_vecs)
    for data_index, data_vec in enumerate(data_vecs):
        best_score = -999.0
        for ref_index, ref_vec in enumerate(refa_vecs):
            score = get_dist1(data_vec, ref_vec)
            if score > best_score:
                best_score = score
                orien_assignment[data_index] = ref_index
    return orien_assignment
### dmatrix and refangles partition
def get_dist1(vec1, vec2):
    """
    Return the dot product of vec1 and vec2 as a float (larger value means
    more similar directions for unit vectors).

    Rewritten from a manual index loop (py2-only xrange) to a generator sum;
    the 0.0 start value preserves the original's float return type.
    """
    return sum((a * b for a, b in zip(vec1, vec2)), 0.0)
def find_neighborhood(refa_vecs, minor_groups):
    """
    For every minor orientation group, find the most similar reference
    direction that is NOT itself a minor group.

    refa_vecs    : reference direction vectors
    minor_groups : indices of under-populated orientation groups
    Returns a list of [minor_index, best_major_index] pairs (entries stay
    [None, None] when no candidate exists).
    """
    matched_oriens = [[None, None] for _ in range(len(minor_groups))]
    for position, minor_index in enumerate(minor_groups):
        best_score = -999.0
        for candidate in range(len(refa_vecs)):
            if candidate in minor_groups:
                continue  # only match against well-populated groups
            score = get_dist1(refa_vecs[minor_index], refa_vecs[candidate])
            if score > best_score:
                best_score = score
                matched_oriens[position] = [minor_index, candidate]
    return matched_oriens
def reassign_ptls_in_orien_groups(assigned_ptls_in_groups, matched_pairs):
    """
    Fold each minor orientation group into its matched major group.

    assigned_ptls_in_groups : per-group particle lists; MODIFIED IN PLACE
                              (major groups absorb their minor groups' members)
    matched_pairs           : [minor_index, major_index] pairs from
                              find_neighborhood
    Returns a new list of the surviving (non-absorbed) groups, each sorted.

    Only change from the original: py2-only xrange replaced by range and the
    pair unpacked explicitly; in-place mutation is preserved.
    """
    absorbed = []
    for pair in matched_pairs:
        minor_index, major_index = pair[0], pair[1]
        assigned_ptls_in_groups[major_index] += assigned_ptls_in_groups[minor_index]
        absorbed.append(minor_index)
    reassignment = []
    for group_index in range(len(assigned_ptls_in_groups)):
        if group_index not in absorbed:
            reassignment.append(sorted(assigned_ptls_in_groups[group_index]))
    return reassignment
def findall_dict(value, L, start=0):
    """
    Return a list of all indices i >= start with L[i] == value.

    Exceptions raised by the comparison itself are swallowed and the index
    skipped, preserving the original's tolerant behavior (e.g. for elements
    that do not support equality with value).

    Rewritten from a manual while loop with off-by-one bookkeeping to a plain
    range loop; the bare except was narrowed to Exception.
    """
    positions = []
    for i in range(start, len(L)):
        try:
            if value == L[i]: positions.append(i)
        except Exception: pass
    return positions
def do_assignment_by_dmatrix_orien_group(peaks, orien_group_members, number_of_groups):
    """
    Greedily assign the particles of one orientation group to classes so that
    each class receives (nearly) the same number of particles, preferring the
    strongest peaks first.

    peaks               : per-class scores; peaks[iref][particle] is the score
                          of the particle for class iref
    orien_group_members : particle indices (into peaks' second axis) belonging
                          to this orientation group
    number_of_groups    : number of classes
    Returns assignment: a list of length len(orien_group_members) holding the
    class id chosen for each member (local indexing).

    WARNING: this code relies on Python-2 integer division — "l/nima",
    "nima/number_of_groups" and "iref/maxasi" must truncate; under Python 3
    they would produce floats and break the indexing.
    """
    import numpy as np
    import random  # NOTE(review): unused in the visible body
    #print(len(orien_group_members))
    nima = len(orien_group_members)
    # flattened cost matrix: row iref, column im, negated so that argsort
    # yields the strongest (largest) peaks first
    dmatrix = [None]*(nima*number_of_groups)
    for im in xrange(nima):
        for iref in xrange(number_of_groups):
            dmatrix[iref*nima+im] = peaks[iref][orien_group_members[im]]*(-1.)
    # do dmatrix
    dd = np.argsort(dmatrix)
    maxasi = nima/number_of_groups   # py2 int division: target class size
    ngs = number_of_groups           # classes still accepting particles
    id_list = [ [] for i in xrange(number_of_groups)]
    del_row = [ False for i in xrange(number_of_groups)]   # full classes
    del_column = [ False for i in xrange(nima)]            # assigned particles
    walktrough = 0
    # Walk the globally sorted costs; skip entries whose class is full or
    # whose particle is taken; the last class may absorb the remainder.
    while ngs > 0:
        flag = True
        while flag:
            l = dd[walktrough]
            igroup = l/nima    # decode flattened index (py2 int division)
            iptl = l%nima
            if del_row[igroup] or del_column[iptl]: walktrough +=1
            else: flag = False
        id_list[igroup].append(iptl)
        if ngs>1:
            # igroup = -1 signals "class not yet full"
            if (len(id_list[igroup]) < maxasi): igroup = -1
            else: ngs -= 1
        else:
            # last open class also takes the nima%number_of_groups remainder
            if len(id_list[igroup]) < maxasi+nima%number_of_groups: igroup = -1
            else: ngs -= 1
        del_column[iptl] = True
        if (igroup != -1): del_row[igroup] = True
    # Serialize the assignment: maxasi members per class, then the remainder
    # of the last filled class (igroup survives from the loop above) plus a
    # trailing sentinel holding that class id.
    id_list1 = []
    for iref in xrange(number_of_groups):
        for im in xrange(maxasi):
            id_list1.append(id_list[iref][im])
    if nima%number_of_groups !=0:
        for im in xrange(maxasi, maxasi+ nima%number_of_groups):
            id_list1.append(id_list[igroup][im])
        id_list1.append(igroup)
    # Rebuild id_list from the serialized form.
    id_list = [[] for i in xrange(number_of_groups)]
    maxasi = nima/number_of_groups
    for iref in xrange(maxasi*number_of_groups):
        id_list[iref/maxasi].append(id_list1[iref])
    if nima%number_of_groups !=0:
        # NOTE(review): "nima%maxasi" looks like it was meant to be
        # "nima%number_of_groups" (the remainder count used above) — confirm.
        for iptl in xrange(nima%maxasi):
            id_list[id_list1[-1]].append(id_list1[maxasi*number_of_groups+iptl])
    for iref in xrange(number_of_groups):
        id_list[iref].sort()
    del id_list1
    # Invert per-class member lists into a per-particle class vector.
    assignment = [None]*nima
    for iref in xrange(number_of_groups):
        for im in id_list[iref]: assignment[im] = iref
    del dmatrix
    del dd
    del id_list
    del del_column
    del del_row
    return assignment
def get_orien_assignment_mpi(angle_step, partids, params, log_main):
global Tracker, Blockdata
from applications import MPI_start_end
from utilities import even_angles, wrap_mpi_recv, wrap_mpi_bcast, wrap_mpi_send, read_text_row, read_text_file, getvec
sym_class = Blockdata["symclass"]
if Blockdata["myid"] == Blockdata["main_node"]:
msg = " Generate sampling orientations for EQKmeans with step %f theta1 %f theta2 %f"%(Tracker["angle_step"], Tracker["tilt1"], Tracker["tilt2"])
log_main.add(msg)
print(msg)
image_start, image_end = MPI_start_end(Tracker["total_stack"], Blockdata["nproc"], Blockdata["myid"])
if Blockdata["myid"] == Blockdata["main_node"]:
orien_group_assignment = [None for im in xrange(Tracker["total_stack"])]
else: orien_group_assignment = 0
#refa = even_angles(angle_step, symmetry = Tracker["constants"]["symmetry"], \
# theta1 = Tracker["tilt1"], theta2 = Tracker["tilt2"], method='S', phiEqpsi="Zero")
refa = sym_class.even_angles(angle_step, theta1 = Tracker["tilt1"], theta2 = Tracker["tilt2"])
#print(refa)
refa_vecs = []
for i in xrange(len(refa)):
tmp = getvec(refa[i][0], refa[i][1])
refa_vecs.append(tmp)
if Blockdata["main_node"] == Blockdata["myid"]:
params = read_text_row(params)
partids = read_text_file(partids, -1)
if len(partids) == 1: partids = partids[0]
else: partids = partids[1]
data_angles = [[None,None] for im in xrange(len(partids))]
for im in xrange(len(partids)):
data_angles[im] = getvec(params[partids[im]][0], params[partids[im]][1])
del params
del partids
else: data_angles = 0
data_angles = wrap_mpi_bcast(data_angles, Blockdata["main_node"], MPI_COMM_WORLD)
data_angles = data_angles[image_start: image_end]
local_orien_group_assignment = partition_data_into_orientation_groups_nompi(refa_vecs, data_angles)
if Blockdata["myid"] == Blockdata["main_node"]: orien_group_assignment[image_start:image_end] = local_orien_group_assignment[:]
else: orien_group_assignment = 0
if Blockdata["main_node"] != Blockdata["myid"]: wrap_mpi_send(local_orien_group_assignment, Blockdata["main_node"], MPI_COMM_WORLD)
else:
for iproc in xrange(Blockdata["nproc"]):
iproc_image_start, iproc_image_end = MPI_start_end(Tracker["total_stack"], Blockdata["nproc"], iproc)
if iproc != Blockdata["main_node"]:
dummy = wrap_mpi_recv(iproc, MPI_COMM_WORLD)
orien_group_assignment[iproc_image_start:iproc_image_end] = dummy[:]
del dummy
mpi_barrier(MPI_COMM_WORLD)
orien_group_assignment = wrap_mpi_bcast(orien_group_assignment, Blockdata["main_node"], MPI_COMM_WORLD)
ptls_in_orien_groups = [ None for iref in xrange(len(refa_vecs))]
for iorien in xrange(len(refa_vecs)):
if iorien%Blockdata["nproc"] == Blockdata["myid"]: ptls_in_orien_groups[iorien] = findall_dict(iorien, orien_group_assignment)
mpi_barrier(MPI_COMM_WORLD)
for iorien in xrange(len(refa_vecs)):
if iorien%Blockdata["nproc"]!= Blockdata["main_node"]:
if iorien%Blockdata["nproc"]==Blockdata["myid"]: wrap_mpi_send(ptls_in_orien_groups[iorien], Blockdata["main_node"], MPI_COMM_WORLD)
if Blockdata["myid"] ==Blockdata["main_node"]: ptls_in_orien_groups[iorien] = wrap_mpi_recv(iorien%Blockdata["nproc"], MPI_COMM_WORLD)
mpi_barrier(MPI_COMM_WORLD)
mpi_barrier(MPI_COMM_WORLD)
mpi_barrier(MPI_COMM_WORLD)
zero_member_group_found = 0
if Blockdata["myid"] == Blockdata["main_node"]:
small_groups = []
for iorien in xrange(len(refa_vecs)):
if len(ptls_in_orien_groups[iorien]) N:
angle_step +=0.1
Tracker["angle_step"] = angle_step
del sym_class
return
def compare_two_iterations(assignment1, assignment2, number_of_groups):
    """
    Compare two clustering assignments (iterative or independent runs).

    assignment1/2    : per-particle group ids, same length
    number_of_groups : number of groups in both assignments
    Returns (stable_fraction, newindeces, list_stable) where stable_fraction
    is the matched-particle count divided by the assignment length.
    """
    import numpy as np
    def _per_group_arrays(assignment):
        # one sorted int32 index array per group
        buckets = [[] for _ in range(number_of_groups)]
        for particle, group in enumerate(assignment):
            buckets[group].append(particle)
        arrays = []
        for members in buckets:
            arr = np.array(members, "int32")
            arr.sort()
            arrays.append(arr)
        return arrays
    res1 = _per_group_arrays(assignment1)
    res2 = _per_group_arrays(assignment2)
    newindeces, list_stable, nb_tot_objs = k_means_match_clusters_asg_new(res1, res2)
    return float(nb_tot_objs)/len(assignment1), newindeces, list_stable
def update_data_assignment(cdata, rdata, assignment, proc_list, nosmearing, myid):
    """
    Stamp the new group assignment onto this rank's image stacks.

    cdata/rdata : local images; with smearing, rdata[i] is itself a list of
                  smeared copies that all receive the same attributes
    assignment  : global per-particle group ids
    proc_list   : per-rank [start, end) index pairs into assignment
    nosmearing  : True when rdata holds one image per particle
    The previous "group" value of cdata (or -1 when unset) is recorded on
    rdata as "previous_group".
    """
    local_ids = assignment[proc_list[myid][0]:proc_list[myid][1]]
    for image_index in range(len(cdata)):
        try: old_group = cdata[image_index].get_attr("group")
        except: old_group = -1  # attribute not set yet on the first pass
        cdata[image_index].set_attr("group", local_ids[image_index])
        if nosmearing:
            rdata[image_index].set_attr("group", local_ids[image_index])
            rdata[image_index].set_attr("previous_group", old_group)
        else:
            for smeared_copy in rdata[image_index]:
                smeared_copy.set_attr("previous_group", old_group)
                smeared_copy.set_attr("group", local_ids[image_index])
    return
def update_rdata_assignment(assignment, proc_list, myid, rdata):
    """
    Stamp each local image in rdata with its group id taken from this rank's
    slice of the global assignment.
    """
    local_ids = assignment[proc_list[myid][0]:proc_list[myid][1]]
    for image_index in range(len(rdata)):
        rdata[image_index].set_attr("group", local_ids[image_index])
    return
def MPI_volume_start_end(number_of_groups, ncolor, mycolor):
    """
    Evenly split number_of_groups volumes across ncolor node groups.

    Returns the half-open range [igroup_start, igroup_end) of volume indices
    handled by node-group color mycolor.
    """
    per_color = float(number_of_groups)/ncolor
    igroup_start = int(round(per_color*mycolor))
    igroup_end = int(round(per_color*(mycolor+1)))
    return igroup_start, igroup_end
## conversion
def copy_refinement_tracker(tracker_refinement):
    """
    Fill unset (None) entries of the sorting Tracker with the corresponding
    values from a refinement Tracker dictionary.

    tracker_refinement : Tracker dictionary of the refinement run
    Modifies the global Tracker in place; returns None.

    BUG FIX: the original iterated "for key, value in Tracker" — iterating a
    dict yields keys only, so the two-name unpack raised ValueError.  It is
    now Tracker.items().  The except branch also referenced value_refinement,
    which is unbound when the lookup itself failed; the message no longer
    uses it.
    """
    global Tracker, Blockdata
    for key, value in Tracker.items():
        try:
            value_refinement = tracker_refinement[key]
            if value is None and value_refinement is not None: Tracker[key] = value_refinement
        except KeyError:
            # key exists only in the sorting tracker
            if Blockdata["myid"] == Blockdata["main_node"]: print(key, " in sorting set as ", value, ", while it is not set in refinement")
    return
def print_dict(dict,theme, exclude = "refinement"):
    """
    Print the key/value pairs of a settings dictionary under a timestamped
    theme headline, skipping bookkeeping keys.

    dict    : dictionary to print (parameter name shadows the builtin; kept
              unchanged for backward compatibility with keyword callers)
    theme   : headline text printed next to the timestamp
    exclude : "refinement" selects the long skip list, anything else the short
    """
    line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
    print(line,theme)
    # NOTE(review): used as right-padding via spaces[len(key):]; in the
    # original source this was most likely a longer run of blanks that the
    # extraction collapsed — confirm against the upstream file.
    spaces = " "
    if exclude =="refinement": exclude = ["constants", "nodes", "yr", "output", "shared_comm", "bckgnoise", "myid", "myid_on_node", "accumulatepw", "chunk_dict", "PW_dict", "full_list", "rshifts", "refang"]
    else: exclude = ["constants", "chunk_dict", "PW_dict", "full_list", "rshifts", "refang"]
    for key, value in sorted( dict.items() ):
        pt = True
        for ll in exclude:
            if(key == ll):
                pt = False
                break
        if pt: print(" => ", key+spaces[len(key):],": ",value)
# --------------------------------------------------------------------------
# - "Tracker" (dictionary) object
# Keeps the current state of option settings and dataset
# (i.e. particle stack, reference volume, reconstructed volume, and etc)
# Each iteration is allowed to add new fields/keys
# if necessary. This happes especially when type of 3D Refinement or metamove changes.
# Conceptually, each iteration will be associated to a specific Tracker state.
# Therefore, the list of Tracker state represents the history of process.
#
# This can be used to restart process from an arbitrary iteration.
##<<<-----------rec3d for sorting------->>>>>>>>>
def stepone(tvol, tweight):
    """
    First reconstruction step: shrink the Fourier volume and its weights by a
    factor of 2, symmetrize if required, and return their Fourier-space ratio.
    """
    global Tracker, Blockdata
    tvol.set_attr("is_complex", 1)
    shrunk_vol = Util.shrinkfvol(tvol, 2)
    shrunk_wgt = Util.shrinkfvol(tweight, 2)
    symmetry = Tracker["constants"]["symmetry"]
    if symmetry != "c1":
        shrunk_vol = shrunk_vol.symfvol(symmetry, -1)
        shrunk_wgt = shrunk_wgt.symfvol(symmetry, -1)
    return Util.divn_cbyr(shrunk_vol, shrunk_wgt)
def steptwo_mpi(tvol, tweight, treg, cfsc = None, regularized = True, color = 0):
    """
    Second reconstruction step (MPI, shared-memory node group): turn the
    accumulated Fourier volume into a real-space map via iterative weighting.

    tvol        : accumulated Fourier backprojection volume (also the output;
                  it is overwritten in place by the C kernel)
    tweight     : accumulated weight volume
    treg        : regularization weights
    cfsc        : FSC curve used when regularized is True
    regularized : apply FSC-derived regularization to the weights
    color       : node-group color this call belongs to
    Returns the final real-space volume on the node-group leader
    (myid_on_node == 0), None on other ranks of the group, and model_blank(1)
    immediately on ranks of a different color.
    """
    global Tracker, Blockdata
    n_iter = 10  # iterations of the iterative-weighting kernel
    if( Blockdata["color"] != color ):return model_blank(1) # This should not be executed if called properly
    if( Blockdata["myid_on_node"] == 0 ):
        nz = tweight.get_zsize()
        ny = tweight.get_ysize()
        nx = tweight.get_xsize()
        tvol.set_attr("is_complex",1)
        if regularized:
            nr = len(cfsc)
            limitres = 0
            # clamp FSC into (0, 0.999] and find the first zero crossing
            for i in xrange(nr):
                cfsc[i] = min(max(cfsc[i], 0.0), 0.999)
                if( cfsc[i] == 0.0 ):
                    limitres = i-1
                    break
            if( limitres == 0 ): limitres = nr-2;
            ovol = reshape_1d(cfsc, nr, 2*nr)
            limitres = 2*min(limitres, Tracker["maxfrad"]) # 2 on account of padding, which is always on
            maxr2 = limitres**2
            for i in xrange(limitres+1, len(ovol), 1): ovol[i] = 0.0
            ovol[0] = 1.0
            it = model_blank(2*nr)
            for i in xrange(2*nr): it[i] = ovol[i]
            del ovol
            # Do not regularize first four
            for i in xrange(5): treg[i] = 0.0
            Util.reg_weights(tweight, treg, it)
            del it
        else:
            limitres = 2*min(Tracker["constants"]["nnxo"]//2, Tracker["maxfrad"])
            maxr2 = limitres**2
        # Iterative weights
        if( Tracker["constants"]["symmetry"] != "c1" ):
            tvol = tvol.symfvol(Tracker["constants"]["symmetry"], limitres)
            tweight = tweight.symfvol(Tracker["constants"]["symmetry"], limitres)
    else:
        # non-leader ranks hold placeholders; real sizes arrive via bcast below
        tvol = model_blank(1)
        tweight = model_blank(1)
        nz = 0
        ny = 0
        nx = 0
        maxr2 = 0
    # collective: distribute the geometry determined on the group leader
    nx = bcast_number_to_all(nx, source_node = 0, mpi_comm = Blockdata["shared_comm"])
    ny = bcast_number_to_all(ny, source_node = 0, mpi_comm = Blockdata["shared_comm"])
    nz = bcast_number_to_all(nz, source_node = 0, mpi_comm = Blockdata["shared_comm"])
    maxr2 = bcast_number_to_all(maxr2, source_node = 0, mpi_comm = Blockdata["shared_comm"])
    vol_data = get_image_data(tvol)
    we_data = get_image_data(tweight)
    # tvol is overwritten, meaning it is also an output
    ifi = mpi_iterefa( vol_data.__array_interface__['data'][0] , we_data.__array_interface__['data'][0] , nx, ny, nz, maxr2, \
        Tracker["constants"]["nnxo"], Blockdata["myid_on_node"], color, Blockdata["no_of_processes_per_group"], Blockdata["shared_comm"], n_iter)
    if( Blockdata["myid_on_node"] == 0 ):
        # Either pad or window in F space to 2*nnxo
        nx = tvol.get_ysize()
        if( nx > 2*Tracker["constants"]["nnxo"]): tvol = fdecimate(tvol, 2*Tracker["constants"]["nnxo"], 2*Tracker["constants"]["nnxo"], 2*Tracker["constants"]["nnxo"], False, False)
        elif(nx < 2*Tracker["constants"]["nnxo"]): tvol = fpol(tvol, 2*Tracker["constants"]["nnxo"], 2*Tracker["constants"]["nnxo"], 2*Tracker["constants"]["nnxo"], RetReal = False, normalize = False)
        # back to real space, recenter, crop to the final box and mask corners
        tvol = fft(tvol)
        tvol = cyclic_shift(tvol,Tracker["constants"]["nnxo"],Tracker["constants"]["nnxo"],Tracker["constants"]["nnxo"])
        tvol = Util.window(tvol, Tracker["constants"]["nnxo"],Tracker["constants"]["nnxo"],Tracker["constants"]["nnxo"])
        tvol.div_sinc(1)
        tvol = cosinemask(tvol, Tracker["constants"]["nnxo"]//2-1,5, None)
        return tvol
    else: return None
### non_mpi
def steptwo(tvol, tweight, treg, cfsc = None, regularized = True):
    """
    Second reconstruction step, serial (non-MPI) variant of steptwo_mpi:
    iterative weighting of the accumulated Fourier volume followed by the
    transform back to a masked real-space map.

    tvol        : accumulated Fourier backprojection volume (overwritten in
                  place by Util.iterefa; also the output)
    tweight     : accumulated weight volume
    treg        : regularization weights
    cfsc        : FSC curve used when regularized is True
    regularized : apply FSC-derived regularization to the weights
    Returns the final real-space volume.
    """
    global Tracker, Blockdata
    nz = tweight.get_zsize()
    ny = tweight.get_ysize()
    nx = tweight.get_xsize()
    tvol.set_attr("is_complex",1)
    if regularized:
        nr = len(cfsc)
        limitres = 0
        # clamp FSC into (0, 0.999] and find the first zero crossing
        for i in xrange(nr):
            cfsc[i] = min(max(cfsc[i], 0.0), 0.999)
            #print( i,cfsc[i] )
            if( cfsc[i] == 0.0 ):
                limitres = i-1
                break
        if( limitres == 0 ): limitres = nr-2;
        ovol = reshape_1d(cfsc, nr, 2*nr)
        limitres = 2*min(limitres, Tracker["maxfrad"]) # 2 on account of padding, which is always on
        maxr2 = limitres**2
        for i in xrange(limitres+1, len(ovol), 1): ovol[i] = 0.0
        ovol[0] = 1.0
        it = model_blank(2*nr)
        for i in xrange(2*nr): it[i] = ovol[i]
        del ovol
        # Do not regularize first four
        for i in xrange(5): treg[i] = 0.0
        Util.reg_weights(tweight, treg, it)
        del it
    else:
        limitres = 2*min(Tracker["constants"]["nnxo"]//2, Tracker["maxfrad"])
        maxr2 = limitres**2
    # Iterative weights
    if( Tracker["constants"]["symmetry"] != "c1" ):
        tvol = tvol.symfvol(Tracker["constants"]["symmetry"], limitres)
        tweight = tweight.symfvol(Tracker["constants"]["symmetry"], limitres)
    # tvol is overwritten, meaning it is also an output
    Util.iterefa(tvol, tweight, maxr2, Tracker["constants"]["nnxo"])
    # Either pad or window in F space to 2*nnxo
    nx = tvol.get_ysize()
    if( nx > 2*Tracker["constants"]["nnxo"] ):
        tvol = fdecimate(tvol, 2*Tracker["constants"]["nnxo"], 2*Tracker["constants"]["nnxo"], 2*Tracker["constants"]["nnxo"], False, False)
    elif(nx < 2*Tracker["constants"]["nnxo"]):
        tvol = fpol(tvol, 2*Tracker["constants"]["nnxo"], 2*Tracker["constants"]["nnxo"], 2*Tracker["constants"]["nnxo"], RetReal = False, normalize = False)
    # back to real space, recenter and crop to the final box
    tvol = fft(tvol)
    tvol = cyclic_shift(tvol,Tracker["constants"]["nnxo"],Tracker["constants"]["nnxo"],Tracker["constants"]["nnxo"])
    tvol.set_attr("npad",2)
    tvol.div_sinc(1)
    tvol.del_attr("npad")
    tvol = Util.window(tvol, Tracker["constants"]["nnxo"],Tracker["constants"]["nnxo"],Tracker["constants"]["nnxo"])
    tvol = cosinemask(tvol,Tracker["constants"]["nnxo"]//2-1,5, None)# clean artifacts in corners
    return tvol
####<<<<<-----------
def recons3d_4nnsorting_MPI(myid, main_node, prjlist, random_subset, CTF = True, upweighted = True, mpi_comm= None, target_size=-1):
    """
    recons3d_4nn_ctf - calculate CTF-corrected 3-D reconstruction from a set of
    projections using three Eulerian angles, two shifts, and CTF settings for
    each projection image.

    Input
        prjlist       : local projections to be inserted (NOTE: images are
                        modified in place — FFT'd and re-attributed below)
        random_subset : 2 inserts every image; 0 or 1 inserts only images
                        whose "chunk_id" attribute matches
        target_size   : size of the reconstruction volumes
    Returns (fftvol, weight, refvol) on main_node, (None, None, None) on all
    other ranks.  The reductions and barrier are collective calls.
    """
    from utilities import reduce_EMData_to_root, random_string, get_im, findall, model_blank, info, get_params_proj
    from filter import filt_table
    from reconstruction import insert_slices_pdf
    from fundamentals import fft
    from statistics import fsc
    from EMAN2 import Reconstructors
    from mpi import MPI_COMM_WORLD, mpi_barrier
    import types
    import datetime
    if mpi_comm == None: mpi_comm = MPI_COMM_WORLD
    imgsize = prjlist[0].get_ysize() # It can be Fourier, so take y-size
    refvol = model_blank(target_size)
    refvol.set_attr("fudge", 1.0)
    if CTF: do_ctf = 1
    else: do_ctf = 0
    fftvol = EMData()
    weight = EMData()
    # EMAN2 reconstructor accumulates into fftvol/weight as slices are inserted
    params = {"size":target_size, "npad":2, "snr":1.0, "sign":1, "symmetry":"c1", "refvol":refvol, "fftvol":fftvol, "weight":weight, "do_ctf": do_ctf}
    r = Reconstructors.get( "nn4_ctfw", params )
    r.setup()
    #if norm_per_particle == None: norm_per_particle = len(prjlist)*[1.0]
    for im in xrange(len(prjlist)):
        phi, theta, psi, s2x, s2y = get_params_proj(prjlist[im], xform = "xform.projection") # shifts are already applied
        if random_subset == 2:
            # flat background noise; the image is FFT'd in place if still real
            bckgn = target_size*[1.]
            if prjlist[im].get_attr("is_complex") == 0: prjlist[im] = fft(prjlist[im])
            prjlist[im].set_attr_dict({"padffted":1, "is_complex":1})
            if not upweighted: prjlist[im] = filt_table(prjlist[im], bckgn)
            prjlist[im].set_attr("bckgnoise", bckgn)
            r.insert_slice(prjlist[im], Transform({"type":"spider","phi":phi,"theta":theta,"psi":psi}), 1.0)
        else:
            # only images of the requested half-set chunk are inserted
            if prjlist[im].get_attr("chunk_id") == random_subset:
                #try: bckgn = prjlist[im].get_attr("bckgnoise")
                bckgn = target_size*[1.]
                if prjlist[im].get_attr("is_complex")==0:
                    prjlist[im] = fft(prjlist[im])
                prjlist[im].set_attr_dict({"padffted":1, "is_complex":1})
                if not upweighted: prjlist[im] = filt_table(prjlist[im], bckgn)
                prjlist[im].set_attr("bckgnoise", bckgn)
                r.insert_slice(prjlist[im], Transform({"type":"spider","phi":phi,"theta":theta,"psi":psi}), 1.0)
    # clean stuff
    reduce_EMData_to_root(fftvol, myid, main_node, comm=mpi_comm)
    reduce_EMData_to_root(weight, myid, main_node, comm=mpi_comm)
    if myid == main_node: dummy = r.finish(True)
    mpi_barrier(mpi_comm)
    if myid == main_node: return fftvol, weight, refvol
    else: return None, None, None
def recons3d_4nnsorting_group_MPI(myid, main_node, prjlist, random_subset, group_ID, CTF = True, upweighted = True, mpi_comm= None, target_size=-1):
    """
    Calculate a CTF-corrected 3-D reconstruction from the projections whose
    "group" attribute equals group_ID, using three Eulerian angles, two shifts
    and per-image CTF settings.

    Input
        prjlist       : local projection images (read; complex copies are
                        inserted, originals are left untouched)
        random_subset : 2 inserts every group member; 0 or 1 restricts to
                        images whose "chunk_id" attribute matches
        group_ID      : class whose members are reconstructed
        target_size   : size of the reconstruction volumes
    Returns (fftvol, weight, refvol) on main_node, (None, None, None)
    elsewhere.  The reductions and barrier are collective calls.

    BUG FIX: the original probed "projlist[0]" — an undefined name (typo for
    prjlist) — for an attribute "qt" that was never used; the NameError was
    silently swallowed by a bare except.  The dead lookup and the unused
    imgsize local have been removed, and the two identical insertion branches
    were merged into one short-circuiting condition.
    """
    from utilities import reduce_EMData_to_root, random_string, get_im, findall
    from EMAN2 import Reconstructors
    from utilities import model_blank, info
    from filter import filt_table
    from mpi import MPI_COMM_WORLD, mpi_barrier
    from statistics import fsc
    from reconstruction import insert_slices_pdf
    from fundamentals import fft
    import datetime, types
    if mpi_comm == None: mpi_comm = MPI_COMM_WORLD
    refvol = model_blank(target_size)
    refvol.set_attr("fudge", 1.0)
    do_ctf = 1 if CTF else 0
    fftvol = EMData()
    weight = EMData()
    # EMAN2 reconstructor accumulates into fftvol/weight as slices are inserted
    params = {"size":target_size, "npad":2, "snr":1.0, "sign":1, "symmetry":"c1", "refvol":refvol, "fftvol":fftvol, "weight":weight, "do_ctf": do_ctf}
    r = Reconstructors.get( "nn4_ctfw", params )
    r.setup()
    for im in range(len(prjlist)):
        phi, theta, psi, s2x, s2y = get_params_proj(prjlist[im], xform = "xform.projection") # shifts are already applied
        if prjlist[im].get_attr("group") == group_ID:
            # random_subset == 2 means both half-sets; otherwise the image's
            # chunk must match (the second get_attr is skipped when it does not
            # need to be evaluated, as in the original branch layout)
            if random_subset == 2 or prjlist[im].get_attr("chunk_id") == random_subset:
                try: bckgn = prjlist[im].get_attr("bckgnoise")
                except: bckgn = target_size*[1.]  # fall back to flat noise
                if prjlist[im].get_attr("is_complex") == 0: image = fft(prjlist[im])
                else: image = prjlist[im].copy()
                image.set_attr_dict({"padffted":1, "is_complex":1})
                if not upweighted: image = filt_table(image, bckgn)
                image.set_attr("bckgnoise", bckgn)
                r.insert_slice(image, Transform({"type":"spider","phi":phi,"theta":theta,"psi":psi}), 1.0)
    reduce_EMData_to_root(fftvol, myid, main_node, comm=mpi_comm)
    reduce_EMData_to_root(weight, myid, main_node, comm=mpi_comm)
    if myid == main_node: dummy = r.finish(True)
    mpi_barrier(mpi_comm)
    if myid == main_node: return fftvol, weight, refvol
    else: return None, None, None
def do3d_sorting(procid, data):
	"""Backproject one random-subset (half-set) reconstruction for sorting.

	Collective MPI call: every rank participates in
	recons3d_4nnsorting_MPI; the rank designated as
	Blockdata["nodes"][procid] receives the Fourier volume, the weight
	volume and the trol volume and writes them into
	Tracker["directory"]/tempdir as tvol_<procid>.hdf,
	tweight_<procid>.hdf and trol_<procid>.hdf.

	procid -- random-subset index (0 or 1); also selects the writer node
	data   -- list of projection images local to this rank
	Returns None.
	"""
	global Tracker, Blockdata
	stamp = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
	if Blockdata["myid"] == Blockdata["main_node"]:
		print(stamp, "do3d_sorting")
	tvol, tweight, trol = recons3d_4nnsorting_MPI(
		myid          = Blockdata["myid"],
		main_node     = Blockdata["nodes"][procid],
		prjlist       = data,
		random_subset = procid,
		CTF           = Tracker["constants"]["CTF"],
		upweighted    = False,
		target_size   = 2*Tracker["nxinit"] + 3)
	if Blockdata["myid"] == Blockdata["nodes"][procid]:
		tempdir = os.path.join(Tracker["directory"], "tempdir")
		# Only the subset-0 writer creates the scratch directory.
		if procid == 0 and not os.path.exists(tempdir):
			os.mkdir(tempdir)
		tvol.set_attr("is_complex", 0)
		tvol.write_image(os.path.join(tempdir, "tvol_%01d.hdf" % procid))
		tweight.write_image(os.path.join(tempdir, "tweight_%01d.hdf" % procid))
		trol.write_image(os.path.join(tempdir, "trol_%01d.hdf" % procid))
	mpi_barrier(MPI_COMM_WORLD)
	return
def do3d_sorting_groups(particle_ID_index, partstack):
global Tracker, Blockdata
from utilities import get_im, wrap_mpi_bcast
data = get_shrink_data_sorting(particle_ID_index, partstack)
do3d_sorting_group_insertion(data)
mpi_barrier(MPI_COMM_WORLD)
fsc143 = 0
fsc05 = 0
Tracker["fsc143"] = 0
Tracker["fsc05"] = 0
res_05 = Blockdata["no_of_groups"]*[0]
res_143 = Blockdata["no_of_groups"]*[0]
for index_of_colors in xrange(Blockdata["no_of_groups"]):
group_start, group_end = MPI_volume_start_end(Tracker["number_of_groups"], Blockdata["no_of_groups"], index_of_colors)
if Blockdata["color"] == index_of_colors: # It has to be 1 to avoid problem with tvol1 not closed on the disk
for index_of_group in xrange(group_start, group_end):
cfsc = 0
if Blockdata["myid_on_node"] == 0:
tvol0 = get_im(os.path.join(Tracker["directory"], "tempdir", "tvol_0_%d.hdf")%index_of_group)
tweight0 = get_im(os.path.join(Tracker["directory"], "tempdir", "tweight_0_%d.hdf")%index_of_group)
tvol1 = get_im(os.path.join(Tracker["directory"], "tempdir", "tvol_1_%d.hdf")%index_of_group)
tweight1 = get_im(os.path.join(Tracker["directory"], "tempdir", "tweight_1_%d.hdf")%index_of_group)
Util.fuse_low_freq(tvol0, tvol1, tweight0, tweight1, 2*Tracker["fuse_freq"])
tag = 7007
send_EMData(tvol1, Blockdata["no_of_processes_per_group"]-1, tag, Blockdata["shared_comm"])
send_EMData(tweight1, Blockdata["no_of_processes_per_group"]-1, tag, Blockdata["shared_comm"])
shrank0 = stepone(tvol0, tweight0)
elif Blockdata["myid_on_node"] == Blockdata["no_of_processes_per_group"]-1:
tag = 7007
tvol1 = recv_EMData(0, tag, Blockdata["shared_comm"])
tweight1 = recv_EMData(0, tag, Blockdata["shared_comm"])
tvol1.set_attr_dict( {"is_complex":1, "is_fftodd":1, 'is_complex_ri': 1, 'is_fftpad': 1} )
shrank1 = stepone(tvol1, tweight1)
mpi_barrier(Blockdata["shared_comm"])
if Blockdata["myid_on_node"] == 0:
tag = 7007
send_EMData(shrank0, Blockdata["no_of_processes_per_group"]-1, tag, Blockdata["shared_comm"])
del shrank0
lcfsc = 0
elif Blockdata["myid_on_node"] == Blockdata["no_of_processes_per_group"]-1:
tag = 7007
shrank0 = recv_EMData(0, tag, Blockdata["shared_comm"])
cfsc = fsc(shrank0, shrank1)[1]
write_text_row(cfsc, os.path.join(Tracker["directory"], "fsc_driver_%d.txt")%index_of_group)
del shrank0, shrank1
if(Tracker["nxinit"]"
if Blockdata["myid"] == Blockdata["main_node"]:print(line, "start backprojection of %d volumes"%Tracker["number_of_groups"])
if randomset ==1:
for index_of_groups in xrange(Tracker["number_of_groups"]):
for procid in xrange(2, 3):
tvol, tweight, trol = recons3d_4nnsorting_group_MPI(myid = Blockdata["myid"], main_node = Blockdata["nodes"][0],\
prjlist = data, random_subset = procid, group_ID = index_of_groups, CTF = Tracker["constants"]["CTF"],\
upweighted = False, target_size = (2*Tracker["nxinit"]+3))
if(Blockdata["myid"] == Blockdata["nodes"][procid]):
tvol.set_attr("is_complex",0)
tvol.write_image(os.path.join(Tracker["directory"], "tempdir", "tvol_%d_%d.hdf"%(procid, index_of_groups)))
tweight.write_image(os.path.join(Tracker["directory"], "tempdir", "tweight_%d_%d.hdf"%(procid, index_of_groups)))
trol.write_image(os.path.join(Tracker["directory"], "tempdir", "trol_%d_%d.hdf"%(procid, index_of_groups)))
mpi_barrier(MPI_COMM_WORLD)
else:
for index_of_groups in xrange(Tracker["number_of_groups"]):
for procid in xrange(2):
tvol, tweight, trol = recons3d_4nnsorting_group_MPI(myid = Blockdata["myid"], main_node = Blockdata["nodes"][procid], \
prjlist = data, random_subset = procid, group_ID = index_of_groups, CTF = Tracker["constants"]["CTF"],\
upweighted = False, target_size = (2*Tracker["nxinit"]+3))
if(Blockdata["myid"] == Blockdata["nodes"][procid]):
tvol.set_attr("is_complex",0)
tvol.write_image(os.path.join(Tracker["directory"], "tempdir", "tvol_%d_%d.hdf"%(procid, index_of_groups)))
tweight.write_image(os.path.join(Tracker["directory"], "tempdir", "tweight_%d_%d.hdf"%(procid, index_of_groups)))
trol.write_image(os.path.join(Tracker["directory"], "tempdir", "trol_%d_%d.hdf"%(procid, index_of_groups)))
mpi_barrier(MPI_COMM_WORLD)
mpi_barrier(MPI_COMM_WORLD)
return
def do3d_sorting_groups_trl_iter(data, iteration):
global Tracker, Blockdata
from utilities import get_im, write_text_row, bcast_number_to_all, wrap_mpi_bcast
keepgoing = 1
if(Blockdata["myid"] == Blockdata["nodes"][0]):
if not os.path.exists(os.path.join(Tracker["directory"], "tempdir")): os.mkdir(os.path.join(Tracker["directory"], "tempdir"))
do3d_sorting_group_insertion(data)
mpi_barrier(MPI_COMM_WORLD)
fsc143 = 0
fsc05 = 0
Tracker["fsc143"] = 0
Tracker["fsc05"] = 0
res_05 = Tracker["number_of_groups"]*[0]
res_143 = Tracker["number_of_groups"]*[0]
for index_of_colors in xrange(Blockdata["no_of_groups"]):
group_start, group_end = MPI_volume_start_end(Tracker["number_of_groups"], Blockdata["no_of_groups"], index_of_colors)
if Blockdata["color"] == index_of_colors: # It has to be 1 to avoid problem with tvol1 not closed on the disk
for index_of_group in xrange(group_start, group_end):
cfsc = 0
if Blockdata["myid_on_node"] == 0:
tvol0 = get_im(os.path.join(Tracker["directory"], "tempdir", "tvol_0_%d.hdf")%index_of_group)
tweight0 = get_im(os.path.join(Tracker["directory"], "tempdir", "tweight_0_%d.hdf")%index_of_group)
tvol1 = get_im(os.path.join(Tracker["directory"], "tempdir", "tvol_1_%d.hdf")%index_of_group)
tweight1 = get_im(os.path.join(Tracker["directory"], "tempdir", "tweight_1_%d.hdf")%index_of_group)
Util.fuse_low_freq(tvol0, tvol1, tweight0, tweight1, 2*Tracker["fuse_freq"])
tag = 7007
send_EMData(tvol1, Blockdata["no_of_processes_per_group"]-1, tag, Blockdata["shared_comm"])
send_EMData(tweight1, Blockdata["no_of_processes_per_group"]-1, tag, Blockdata["shared_comm"])
shrank0 = stepone(tvol0, tweight0)
elif Blockdata["myid_on_node"] == Blockdata["no_of_processes_per_group"]-1:
tag = 7007
tvol1 = recv_EMData(0, tag, Blockdata["shared_comm"])
tweight1 = recv_EMData(0, tag, Blockdata["shared_comm"])
tvol1.set_attr_dict( {"is_complex":1, "is_fftodd":1, 'is_complex_ri': 1, 'is_fftpad': 1} )
shrank1 = stepone(tvol1, tweight1)
mpi_barrier(Blockdata["shared_comm"])
if Blockdata["myid_on_node"] == 0:
tag = 7007
send_EMData(shrank0, Blockdata["no_of_processes_per_group"]-1, tag, Blockdata["shared_comm"])
del shrank0
lcfsc = 0
elif Blockdata["myid_on_node"] == Blockdata["no_of_processes_per_group"]-1:
tag = 7007
shrank0 = recv_EMData(0, tag, Blockdata["shared_comm"])
# Note shrank volumes are Fourier uncentered.
cfsc = fsc(shrank0, shrank1)[1]
write_text_row(cfsc, os.path.join(Tracker["directory"], "fsc_driver_grp%03d_iter%03d.txt")%(index_of_group,iteration))
del shrank0, shrank1
if(Tracker["nxinit"]"
if Blockdata["myid"] == Blockdata["main_node"]:
msg = "Import results from SPARX 3-D refinement"
print(line, msg)
log_main.add(msg)
if Tracker["constants"]["niter_for_sorting"] == -1: # take the best solution to do sorting
msg = "Search in the directory %s ......"%Tracker["constants"]["refinement_dir"]
print(line, msg)
log_main.add(msg)
niter_refinement = 0
while os.path.exists(os.path.join(Tracker["constants"]["refinement_dir"], "main%03d"%niter_refinement)) and os.path.exists(os.path.join(Tracker["constants"]["refinement_dir"],"main%03d"%niter_refinement, "Tracker_%03d.json"%niter_refinement)):
niter_refinement +=1
niter_refinement -=1
if niter_refinement !=0:
fout = open(os.path.join(Tracker["constants"]["refinement_dir"],"main%03d"%niter_refinement, "Tracker_%03d.json"%niter_refinement),'r')
Tracker_refinement = convert_json_fromunicode(json.load(fout))
fout.close()
selected_iter = Tracker_refinement["constants"]["best"]
else: import_from_sparx_refinement = 0
else:
msg = "Try to load json file ...%s"%os.path.join(Tracker["constants"]["refinement_dir"],"main%03d"%Tracker["constants"]["niter_for_sorting"],\
"Tracker_%03d.json"%Tracker["constants"]["niter_for_sorting"])
print(line, msg)
log_main.add(msg)
try:
fout = open(os.path.join(Tracker["constants"]["refinement_dir"],"main%03d"%Tracker["constants"]["niter_for_sorting"], \
"Tracker_%03d.json"%Tracker["constants"]["niter_for_sorting"]),'r')
Tracker_refinement = convert_json_fromunicode(json.load(fout))
fout.close()
selected_iter = Tracker["constants"]["niter_for_sorting"]
except: import_from_sparx_refinement = 0
else: selected_iter = -1
selected_iter = bcast_number_to_all(selected_iter, Blockdata["main_node"], MPI_COMM_WORLD)
import_from_sparx_refinement = bcast_number_to_all(import_from_sparx_refinement, source_node = Blockdata["main_node"])
if import_from_sparx_refinement == 0:
ERROR("The best solution is not found","get_input_from_sparx_ref3d", "get_input_from_sparx_ref3d", 1, Blockdata["myid"])
from mpi import mpi_finalize
mpi_finalize()
exit()
Tracker_refinement = wrap_mpi_bcast(Tracker_refinement, Blockdata["main_node"], communicator = MPI_COMM_WORLD)
# Check orgstack, set correct path
line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
if Blockdata["myid"] == Blockdata["main_node"]:
refinement_dir_path, refinement_dir_name = os.path.split(Tracker["constants"]["refinement_dir"])
if Tracker_refinement["constants"]["stack"][0:4]=="bdb:": refinement_stack = "bdb:"+os.path.join(refinement_dir_path, Tracker_refinement["constants"]["stack"][4:])
else: refinement_stack = os.path.join(refinement_dir_path, Tracker_refinement["constants"]["stack"])
if not Tracker["constants"]["orgstack"]: # Use refinement stack if instack is not provided
msg = "refinement stack %s"%refinement_stack
print(line, msg)
log_main.add(msg)
Tracker["constants"]["orgstack"] = refinement_stack #Tracker_refinement["constants"]["stack"]
print(line, "The refinement image stack is %s"%Tracker_refinement["constants"]["stack"])
try: image = get_im(Tracker["constants"]["orgstack"], 0)
except:
print(line, "Fail to read image stack")
import_from_sparx_refinement = 0
else:
if Tracker["constants"]["orgstack"] == Tracker_refinement["constants"]["stack"]: # instack and refinement data stack is the same
msg = "The sorting instack is the same refinement instack: %s"%Tracker_refinement["constants"]["stack"]
print(line, msg)
log_main.add(msg)
if not os.path.exists(Tracker["constants"]["orgstack"]): import_from_sparx_refinement = 0
else: # complicated cases
if (not os.path.exists(Tracker["constants"]["orgstack"])) and (not os.path.exists(Tracker_refinement["constants"]["stack"])):
import_from_sparx_refinement = 0
elif (not os.path.exists(Tracker["constants"]["orgstack"])) and os.path.exists(Tracker_refinement["constants"]["stack"]):
old_stack = Tracker["constants"]["stack"]
if old_stack[0:3] == "bdb":
Tracker["constants"]["orgstack"] = "bdb:" + Tracker["constants"]["refinement_dir"]+"/../"+old_stack[4:]
else: Tracker["constants"]["orgstack"] = os.path.join(option_old_refinement_dir, "../", old_stack)
msg = "Use refinement orgstack "
print(line, msg)
log_main.add(msg)
else:
msg = "Use orgstack provided by options"
print(line, msg)
log_main.add(msg)
if import_from_sparx_refinement:
msg = "data stack for sorting is %s"%Tracker["constants"]["orgstack"]
print(line, msg)
log_main.add(msg)
total_stack = EMUtil.get_image_count(Tracker["constants"]["orgstack"])
else: total_stack = 0
import_from_sparx_refinement = bcast_number_to_all(import_from_sparx_refinement, source_node = Blockdata["main_node"])
if import_from_sparx_refinement == 0:ERROR("The data stack is not accessible","get_input_from_sparx_ref3d",1, Blockdata["myid"])
total_stack = bcast_number_to_all(total_stack, source_node = Blockdata["main_node"])
Tracker["constants"]["total_stack"] = total_stack
# Now copy relevant refinement files to sorting directory:
if Blockdata["myid"] == Blockdata["main_node"]:
if os.path.exists(os.path.join(Tracker["constants"]["refinement_dir"], "main%03d"%selected_iter, "params_%03d.txt"%selected_iter)):
copyfile( os.path.join(Tracker["constants"]["refinement_dir"], "main%03d"%selected_iter, \
"params_%03d.txt"%selected_iter), os.path.join(Tracker["constants"]["masterdir"], "sparx_refinement_params.txt"))
else: import_from_sparx_refinement = 0
Tracker["constants"]["selected_iter"] = selected_iter
import_from_sparx_refinement = bcast_number_to_all(import_from_sparx_refinement, source_node = Blockdata["main_node"])
if import_from_sparx_refinement == 0:ERROR("The parameter file of the best solution is not accessible", "get_input_from_sparx_ref3d", 1, Blockdata["myid"])
if Blockdata["myid"] == Blockdata["main_node"]:
if os.path.exists(os.path.join(Tracker["constants"]["refinement_dir"], "main%03d"%selected_iter, "bckgnoise.hdf")):
copyfile(os.path.join(Tracker["constants"]["refinement_dir"], "main%03d"%selected_iter, "bckgnoise.hdf"),\
os.path.join(Tracker["constants"]["masterdir"], "bckgnoise.hdf"))
else:
import_from_sparx_refinement == 0
for search_iter in xrange(selected_iter-1, 0, -1):
if os.path.exists(os.path.join(Tracker["constants"]["refinement_dir"], "main%03d"%search_iter, "bckgnoise.hdf")):
copyfile(os.path.join(Tracker["constants"]["refinement_dir"], "main%03d"%search_iter, \
"bckgnoise.hdf"), os.path.join(Tracker["constants"]["masterdir"], "bckgnoise.hdf"))
import_from_sparx_refinement = 1
break
import_from_sparx_refinement = bcast_number_to_all(import_from_sparx_refinement, source_node = Blockdata["main_node"])
if import_from_sparx_refinement == 0:
Tracker["bckgnoise"] = None
if Blockdata["myid"] == Blockdata["main_node"]: print("Noise file is not found. However we continue")
else: Tracker["bckgnoise"] = os.path.join(Tracker["constants"]["masterdir"], "bckgnoise.hdf")
import_from_sparx_refinement = 1
if Blockdata["myid"] == Blockdata["main_node"]:
if os.path.exists(os.path.join(Tracker["constants"]["refinement_dir"], "main%03d"%selected_iter, "driver_%03d.txt"%selected_iter)):
copyfile(os.path.join(Tracker["constants"]["refinement_dir"], "main%03d"%selected_iter, \
"driver_%03d.txt"%selected_iter), os.path.join(Tracker["constants"]["masterdir"], "fsc_global.txt"))
else: import_from_sparx_refinement = 0
#Tracker["constants"]["selected_iter"] = selected_iter
if import_from_sparx_refinement: fsc_curve = read_text_row(os.path.join(Tracker["constants"]["masterdir"], "fsc_global.txt"))
fsc143 = 0
fsc05 = 0
for ifreq in xrange(len(fsc_curve)): # drive has only one column
if fsc_curve[ifreq][0] < 0.5: break
fsc05 = ifreq - 1
for ifreq in xrange(len(fsc_curve)):
if fsc_curve[ifreq][0] < 0.143: break
fsc143 = ifreq - 1
Tracker["constants"]["fsc143"] = fsc143
Tracker["constants"]["fsc05"] = fsc05
import_from_sparx_refinement = bcast_number_to_all(import_from_sparx_refinement, source_node = Blockdata["main_node"])
if import_from_sparx_refinement == 0:ERROR("The driver of the best solution is not accessible","get_input_from_sparx_ref3d", 1, Blockdata["myid"])
if Blockdata["myid"] == Blockdata["main_node"]:
if os.path.exists(os.path.join(Tracker["constants"]["refinement_dir"], "main000/indexes_000.txt")):
copyfile(os.path.join(Tracker["constants"]["refinement_dir"], "main000/indexes_000.txt"), \
os.path.join(Tracker["constants"]["masterdir"], "indexes.txt"))
else: import_from_sparx_refinement = 0
import_from_sparx_refinement = bcast_number_to_all(import_from_sparx_refinement, source_node = Blockdata["main_node"])
if import_from_sparx_refinement == 0: ERROR("The index file of the best solution are not accessible","get_input_from_sparx_ref3d", 1, Blockdata["myid"])
if Blockdata["myid"] == Blockdata["main_node"]:
if os.path.exists(os.path.join(Tracker["constants"]["refinement_dir"], "main000/chunk_0_000.txt")):
copyfile( os.path.join(Tracker["constants"]["refinement_dir"], "main000/chunk_0_000.txt"), \
os.path.join(Tracker["constants"]["masterdir"], "chunk_0.txt"))
else: import_from_sparx_refinement == 0
if os.path.exists(os.path.join(Tracker["constants"]["refinement_dir"], "main000/chunk_1_000.txt")):
copyfile(os.path.join(Tracker["constants"]["refinement_dir"], "main000/chunk_1_000.txt"), \
os.path.join(Tracker["constants"]["masterdir"], "chunk_1.txt"))
else: import_from_sparx_refinement == 0
if os.path.exists(os.path.join(Tracker["constants"]["refinement_dir"], "main000/particle_groups_0.txt")):
copyfile(os.path.join(Tracker["constants"]["refinement_dir"], "main000/particle_groups_0.txt"), \
os.path.join(Tracker["constants"]["masterdir"], "particle_groups_0.txt"))
else: import_from_sparx_refinement == 0
if os.path.exists(os.path.join(Tracker["constants"]["refinement_dir"], "main000/particle_groups_1.txt")):
copyfile( os.path.join(Tracker["constants"]["refinement_dir"], "main000/particle_groups_1.txt"), \
os.path.join(Tracker["constants"]["masterdir"], "particle_groups_1.txt"))
else:import_from_sparx_refinement == 0
import_from_sparx_refinement = bcast_number_to_all(import_from_sparx_refinement, source_node = Blockdata["main_node"])
if import_from_sparx_refinement == 0:ERROR("The chunk files and partice group files are not accessible","get_input_from_sparx_ref3d",1, Blockdata["myid"])
line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
# copy all relavant parameters into sorting tracker
if Blockdata["myid"] == Blockdata["main_node"]:
if Tracker["constants"]["radius"] == -1: Tracker["constants"]["radius"] = Tracker_refinement["constants"]["radius"]
Tracker["constants"]["nnxo"] = Tracker_refinement["constants"]["nnxo"]
Tracker["constants"]["orgres"] = Tracker_refinement["bestres"]
Tracker["delta"] = Tracker_refinement["delta"]
Tracker["ts"] = Tracker_refinement["ts"]
Tracker["xr"] = Tracker_refinement["xr"]
Tracker["constants"]["pixel_size"] = Tracker_refinement["constants"]["pixel_size"]
Tracker["avgnorm"] = Tracker_refinement["avgvaradj"]
if Tracker["constants"]["nxinit"]<0: Tracker["nxinit_refinement"] = Tracker_refinement["nxinit"] #Sphire window size
else: Tracker["nxinit_refinement"] = Tracker["constants"]["nxinit"] #User defined window size
try: sym = Tracker_refinement["constants"]["sym"]
except: sym = Tracker_refinement["constants"]["symmetry"]
Tracker["constants"]["symmetry"] = sym
print(line, "Parameters importing is done!")
if not Tracker["constants"]["mask3D"] and Tracker_refinement["constants"]["mask3D"]:
refinement_mask3D_path, refinement_mask3D_file = os.path.split(Tracker_refinement["constants"]["mask3D"])# MRK_DEBUG
copyfile( os.path.join(refinement_dir_path, Tracker_refinement["constants"]["mask3D"]), \
os.path.join(Tracker["constants"]["masterdir"], refinement_mask3D_file))
Tracker["constants"]["mask3D"] = os.path.join(Tracker["constants"]["masterdir"], refinement_mask3D_file)
Tracker = wrap_mpi_bcast(Tracker, Blockdata["main_node"], communicator = MPI_COMM_WORLD)
import_from_sparx_refinement = bcast_number_to_all(import_from_sparx_refinement, source_node = Blockdata["main_node"])
if not import_from_sparx_refinement:ERROR("Import parameters from SPARX refinement failed", "get_input_from_sparx_ref3d", 1, Blockdata["myid"])
# Setting for margin error
chunk_dict = {}
group_dict = {}
if(Blockdata["myid"] == Blockdata["main_node"]):
chunk_one = read_text_file(os.path.join(Tracker["constants"]["masterdir"],"chunk_0.txt"))
chunk_two = read_text_file(os.path.join(Tracker["constants"]["masterdir"],"chunk_1.txt"))
else:
chunk_one = 0
chunk_two = 0
chunk_one = wrap_mpi_bcast(chunk_one, Blockdata["main_node"])
chunk_two = wrap_mpi_bcast(chunk_two, Blockdata["main_node"])
#
if(Blockdata["myid"] == Blockdata["main_node"]):
chunk_one_group = read_text_file(os.path.join(Tracker["constants"]["masterdir"],"particle_groups_0.txt"))
chunk_two_group = read_text_file(os.path.join(Tracker["constants"]["masterdir"],"particle_groups_1.txt"))
else:
chunk_one_group = 0
chunk_two_group = 0
chunk_one_group = wrap_mpi_bcast(chunk_one_group, Blockdata["main_node"])
chunk_two_group = wrap_mpi_bcast(chunk_two_group, Blockdata["main_node"])
for index_of_element in xrange(len(chunk_one)):
chunk_dict[chunk_one[index_of_element]] = 0
group_dict[chunk_one[index_of_element]] = chunk_one_group[index_of_element]
for index_of_element in xrange(len(chunk_two)):
chunk_dict[chunk_two[index_of_element]] = 1
group_dict[chunk_two[index_of_element]] = chunk_two_group[index_of_element]
Tracker["chunk_dict"] = chunk_dict
Tracker["P_chunk_0"] = len(chunk_one)/float(total_stack)
Tracker["P_chunk_1"] = len(chunk_two)/float(total_stack)
if(Blockdata["myid"] == Blockdata["main_node"]):
chunk_ids = []
group_ids = []
partids = read_text_file(os.path.join(Tracker["constants"]["masterdir"], "indexes.txt"),-1)
partids = partids[0]
Tracker["constants"]["total_stack"] = len(partids)
params = read_text_file(os.path.join(Tracker["constants"]["masterdir"], "sparx_refinement_params.txt"),-1)
for index_of_particle in xrange(len(partids)):
chunk_ids.append(chunk_dict[partids[index_of_particle]])
group_ids.append(group_dict[partids[index_of_particle]])
refinement_params = [ params[0], params[1], params[2], params[3], params[4], chunk_ids, group_ids, params[7]]
write_text_file(refinement_params, os.path.join(Tracker["constants"]["masterdir"], "refinement_parameters.txt"))
line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
print(line, "Initialization of sorting from SPARX refinement is done")
else: Tracker["constants"]["total_stack"] = 0
Tracker["constants"]["total_stack"] = bcast_number_to_all(Tracker["constants"]["total_stack"], Blockdata["main_node"], MPI_COMM_WORLD)
Tracker["total_stack"] = Tracker["constants"]["total_stack"]
Tracker["constants"]["partstack"] = os.path.join(Tracker["constants"]["masterdir"], "refinement_parameters.txt")
total_stack = Tracker["constants"]["total_stack"]
Tracker["currentres"] = float(Tracker["constants"]["fsc05"])/float(Tracker["constants"]["nxinit"])
Tracker["bckgnoise"] = os.path.join(Tracker["constants"]["masterdir"], "bckgnoise.hdf")
# Now copy oldparamstruture
copy_oldparamstructure_from_meridien_MPI(selected_iter, log_main)
return import_from_sparx_refinement
def get_input_from_datastack(log_main):# Case three
global Tracker, Blockdata
from utilities import write_text_file, write_text_row, wrap_mpi_bcast
import json
from string import split, atoi
from random import shuffle
import_from_data_stack = 1
line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
if(Blockdata["myid"] == Blockdata["main_node"]):
msg = "Import xform.projection paramters from data stack %s "%Tracker["constants"]["orgstack"]
print(line, msg)
log_main.add(msg)
image = get_im(Tracker["constants"]["orgstack"])
Tracker["constants"]["nnxo"] = image.get_xsize()
if( Tracker["nxinit"] > Tracker["constants"]["nnxo"]):
ERROR("Image size less than minimum permitted $d"%Tracker["nxinit"],"get_input_from_datastack",1, Blockdata["myid"])
nnxo = -1
else:
if Tracker["constants"]["CTF"]:
ictf = image.get_attr('ctf')
Tracker["constants"]["pixel_size"] = ictf.apix
else:
Tracker["constants"]["pixel_size"] = 1.0
del image
else:
Tracker["constants"]["nnxo"] = 0
Tracker["constants"]["pixel_size"] = 1.0
Tracker["constants"]["nnxo"] = bcast_number_to_all(Tracker["constants"]["nnxo"], source_node = Blockdata["main_node"])
if( Tracker["constants"]["nnxo"] < 0): ERROR("Image size is negative", "get_input_from_datastack", 1, Blockdata["main_node"])
Tracker["constants"]["pixel_size"] = bcast_number_to_all(Tracker["constants"]["pixel_size"], source_node = Blockdata["main_node"])
if(Tracker["constants"]["radius"] < 1): Tracker["constants"]["radius"] = Tracker["constants"]["nnxo"]//2-2
elif((2*Tracker["constants"]["radius"] +2) > Tracker["constants"]["nnxo"]): ERROR("Particle radius set too large!", \
"get_input_from_datastack",1, Blockdata["myid"])
if Blockdata["myid"] == Blockdata["main_node"]: total_stack = EMUtil.get_image_count(Tracker["constants"]["orgstack"])
else: total_stack = 0
total_stack = bcast_number_to_all(total_stack, Blockdata["main_node"])
# randomly assign two subsets
Tracker["constants"]["total_stack"] = total_stack
Tracker["constants"]["chunk_0"] = os.path.join(Tracker["constants"]["masterdir"],"chunk_0.txt")
Tracker["constants"]["chunk_1"] = os.path.join(Tracker["constants"]["masterdir"],"chunk_1.txt")
Tracker["constants"]["partstack"] = os.path.join(Tracker["constants"]["masterdir"], "refinement_parameters.txt")
Tracker["previous_parstack"] = os.path.join(Tracker["constants"]["masterdir"], "refinement_parameters.txt")#
###
Tracker["refang"], Tracker["rshifts"], Tracker["delta"] = None, None, None
Tracker["avgnorm"] =1.0
chunk_dict = {}
chunk_list = []
if Blockdata["myid"] == Blockdata["main_node"]:
chunk_dict = {}
tlist = range(total_stack)
write_text_file(tlist, os.path.join(Tracker["constants"]["masterdir"], "indexes.txt"))
shuffle(tlist)
chunk_one = tlist[0:total_stack//2]
chunk_two = tlist[total_stack//2:]
chunk_one = sorted(chunk_one)
chunk_two = sorted(chunk_two)
write_text_row(chunk_one,Tracker["constants"]["chunk_0"])
write_text_row(chunk_two,Tracker["constants"]["chunk_1"])
for particle in chunk_one: chunk_dict[particle] = 0
for particle in chunk_two: chunk_dict[particle] = 1
xform_proj_list = EMUtil.get_all_attributes(Tracker["constants"]["orgstack"], "xform.projection")
for index_of_particle in xrange(len(xform_proj_list)):
dp = xform_proj_list[index_of_particle].get_params("spider")
xform_proj_list[index_of_particle] = [dp["phi"], dp["theta"], dp["psi"], -dp["tx"], -dp["ty"], chunk_dict[index_of_particle]]
write_text_row(xform_proj_list, Tracker["constants"]["partstack"])
else:
chunk_one = 0
chunk_two = 0
chunk_one = wrap_mpi_bcast(chunk_one, Blockdata["main_node"])
chunk_two = wrap_mpi_bcast(chunk_two, Blockdata["main_node"])
for element in chunk_one: chunk_dict[element] = 0
for element in chunk_two: chunk_dict[element] = 1
chunk_list = [chunk_one, chunk_two]
Tracker["chunk_dict"] = chunk_dict
Tracker["P_chunk_0"] = len(chunk_one)/float(total_stack)
Tracker["P_chunk_1"] = len(chunk_two)/float(total_stack)
# Reconstruction to determine the resolution in orignal data size
Tracker["nxinit"] = Tracker["constants"]["nnxo"]
Tracker["shrinkage"] = float(Tracker["nxinit"])/float(Tracker["constants"]["nnxo"])
Tracker["bckgnoise"] = None
temp = model_blank(Tracker["constants"]["nnxo"], Tracker["constants"]["nnxo"])
nny = temp.get_ysize()
Blockdata["bckgnoise"] = [1.0]*nny # set for initial recon3D of data from stack
Tracker["focus3D"] = None
Tracker["fuse_freq"] = int(Tracker["constants"]["pixel_size"]*Tracker["constants"]["nnxo"]/Tracker["constants"]["fuse_freq"] +0.5)
Tracker["directory"] = Tracker["constants"]["masterdir"]
if Tracker["constants"]["nxinit"]< 0: Tracker["nxinit_refinement"] = Tracker["constants"]["nnxo"]
else: Tracker["nxinit_refinement"] = Tracker["constants"]["nxinit"]
for procid in xrange(2):
line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
if Blockdata["myid"] == Blockdata["main_node"]: print(line, "Reconstruction of random subset %d"%procid)
data = get_shrink_data_sorting(os.path.join(Tracker["constants"]["masterdir"],"chunk_%01d.txt"%procid), Tracker["constants"]["partstack"])
mpi_barrier(MPI_COMM_WORLD)
do3d_sorting(procid, data)
mpi_barrier(MPI_COMM_WORLD)
if( Blockdata["myid"] == Blockdata["main_shared_nodes"][1]): # It has to be 1 to avoid problem with tvol1 not closed on the disk
print("come to mainshared_nodes 1", Blockdata["myid"])
print(Blockdata["main_shared_nodes"], Blockdata["no_of_processes_per_group"])
tvol0 = get_im(os.path.join(Tracker["directory"], "tempdir", "tvol_0.hdf"))
tweight0 = get_im(os.path.join(Tracker["directory"], "tempdir", "tweight_0.hdf"))
tvol1 = get_im(os.path.join(Tracker["directory"], "tempdir", "tvol_1.hdf"))
tweight1 = get_im(os.path.join(Tracker["directory"], "tempdir", "tweight_1.hdf"))
Util.fuse_low_freq(tvol0, tvol1, tweight0, tweight1, 2*Tracker["fuse_freq"])
tag = 7007
send_EMData(tvol1, Blockdata["main_shared_nodes"][0], tag, MPI_COMM_WORLD)
send_EMData(tweight1, Blockdata["main_shared_nodes"][0], tag, MPI_COMM_WORLD)
shrank0 = stepone(tvol0, tweight0)
send_EMData(shrank0, Blockdata["main_shared_nodes"][0], tag, MPI_COMM_WORLD)
del shrank0
lcfsc = 0
elif( Blockdata["myid"] == Blockdata["main_shared_nodes"][0]):
print("come to mainshared_nodes 1", Blockdata["myid"])
tag = 7007
tvol1 = recv_EMData(Blockdata["main_shared_nodes"][1], tag, MPI_COMM_WORLD)
tweight1 = recv_EMData(Blockdata["main_shared_nodes"][1], tag, MPI_COMM_WORLD)
tvol1.set_attr_dict( {"is_complex":1, "is_fftodd":1, 'is_complex_ri': 1, 'is_fftpad': 1} )
shrank1 = stepone(tvol1, tweight1)
# Get shrank volume, do fsc, send it to all
shrank0 = recv_EMData(Blockdata["main_shared_nodes"][1], tag, MPI_COMM_WORLD)
# Note shrank volumes are Fourier uncentered.
cfsc = fsc(shrank0, shrank1)[1]
write_text_row(cfsc, os.path.join(Tracker["directory"], "fsc_global.txt"))
del shrank0, shrank1
if(Tracker["nxinit"] -1):
orgstack = Tracker["constants"]["orgstack"]
selected_iter = option_selected_iter
if Blockdata["subgroup_myid"] == Blockdata["main_node"]: cluster = sorted(read_text_file(option_selected_cluster))
else: cluster = 0
cluster = wrap_mpi_bcast(cluster, Blockdata["main_node"], comm) # balance processors
old_refinement_iter_dir = os.path.join(option_old_refinement_dir, "main%03d"%selected_iter)
old_oldparamstructure_dir = os.path.join(old_refinement_iter_dir, "oldparamstructure")
old_previousoutputdir = os.path.join(option_old_refinement_dir, "main%03d"%(selected_iter-1))
if Blockdata["subgroup_myid"] == Blockdata["main_node"]:
nproc_old_ref3d = 0
while os.path.exists(os.path.join(old_oldparamstructure_dir, "oldparamstructure_0_%03d_%03d.json"%(nproc_old_ref3d, selected_iter))):nproc_old_ref3d += 1
else: nproc_old_ref3d = 0
nproc_old_ref3d = bcast_number_to_all(nproc_old_ref3d, Blockdata["main_node"], comm)
# read old refinement Tracker
if Blockdata["subgroup_myid"] == Blockdata["main_node"]:
fout = open(os.path.join(old_refinement_iter_dir, "Tracker_%03d.json"%selected_iter),"r")
Tracker = convert_json_fromunicode(json.load(fout))
fout.close()
else: Tracker = 0
Tracker = wrap_mpi_bcast(Tracker, Blockdata["main_node"], comm) # balance processors
Tracker["constants"]["orgstack"] = orgstack
if Blockdata["subgroup_myid"] == Blockdata["main_node"]:
noiseimage = get_im(os.path.join(old_previousoutputdir, "bckgnoise.hdf"))
noiseimage1 = get_im(os.path.join(old_refinement_iter_dir, "bckgnoise.hdf"))
params = read_text_row(os.path.join(old_refinement_iter_dir, "params_%03d.txt"%selected_iter))
params_last_iter = read_text_row(os.path.join(old_previousoutputdir, "params_%03d.txt"%(selected_iter-1)))
refang = read_text_row(os.path.join(old_refinement_iter_dir, "refang.txt"))
rshifts = read_text_row(os.path.join(old_refinement_iter_dir, "rshifts.txt"))
chunk_one = read_text_file(os.path.join(old_refinement_iter_dir, "chunk_0_%03d.txt"%selected_iter))
chunk_two = read_text_file(os.path.join(old_refinement_iter_dir, "chunk_1_%03d.txt"%selected_iter))
error_threshold = read_text_row(os.path.join(old_refinement_iter_dir, "error_thresholds_%03d.txt"%selected_iter))
else:
params = 0
refang = 0
rshifts = 0
chunk_one = 0
chunk_two = 0
params_last_iter = 0
params = wrap_mpi_bcast(params, Blockdata["main_node"], comm)
params_last_iter = wrap_mpi_bcast(params_last_iter, Blockdata["main_node"], comm)
refang = wrap_mpi_bcast(refang, Blockdata["main_node"], comm)
rshifts = wrap_mpi_bcast(rshifts, Blockdata["main_node"], comm)
chunk_one = wrap_mpi_bcast(chunk_one, Blockdata["main_node"], comm)
chunk_two = wrap_mpi_bcast(chunk_two, Blockdata["main_node"], comm)
chunk_dict = {}
for a in chunk_one: chunk_dict[a] = 0
for b in chunk_two: chunk_dict[b] = 1
### handle the selected cluster
# create directories
main0_dir = os.path.join(masterdir, "main000")
iter_dir = os.path.join(masterdir, "main%03d"%selected_iter)
previousoutputdir = os.path.join(masterdir, "main%03d"%(selected_iter-1))
new_oldparamstructure_dir = os.path.join(iter_dir,"oldparamstructure")
if Blockdata["subgroup_myid"] == Blockdata["main_node"]:
if not os.path.exists(iter_dir): os.mkdir(iter_dir)
if not os.path.exists(main0_dir):os.mkdir(main0_dir)
if not os.path.exists(new_oldparamstructure_dir):os.mkdir(new_oldparamstructure_dir)
if not os.path.exists(previousoutputdir):os.mkdir(previousoutputdir)
mpi_barrier(comm)
# load selected iter
new_chunk_one = []
new_chunk_two = []
new_params = []
new_params_chunk_one = []
new_params_chunk_two = []
new_params_chunk_one_last_iter = []
new_params_chunk_two_last_iter = []
Tracker["avgvaradj"] = [0.0, 0.0]
for index_of_particle in xrange(len(cluster)):
if chunk_dict[cluster[index_of_particle]] == 0:
new_chunk_one.append(cluster[index_of_particle])
new_params_chunk_one.append(params[cluster[index_of_particle]])
new_params_chunk_one_last_iter.append(params_last_iter[cluster[index_of_particle]])
Tracker["avgvaradj"][0] += params[cluster[index_of_particle]][7]
else:
new_chunk_two.append(cluster[index_of_particle])
new_params_chunk_two.append(params[cluster[index_of_particle]])
new_params_chunk_two_last_iter.append(params_last_iter[cluster[index_of_particle]])
Tracker["avgvaradj"][1] += params[cluster[index_of_particle]][7]
new_params.append(params[cluster[index_of_particle]])
selected_new_params = new_params
if Blockdata["subgroup_myid"] == Blockdata["main_node"]:# some numbers and path are required to be modified
Tracker["constants"]["masterdir"] = masterdir
Tracker["directory"] = iter_dir
try: sym = Tracker["constants"]["sym"] # For those generated by old version meridians
except: sym = Tracker["constants"]["symmetry"]
Tracker["constants"]["symmetry"] = sym
Tracker["best"] = selected_iter +2 # reset the best to arbitrary iteration
Tracker["bestres"] = 0
Tracker["no_improvement"] = 0
Tracker["no_params_changes"] = 0
Tracker["pixercutoff"] = 0
Tracker["saturated_sampling"] = False
Tracker["is_converged"] = False
Tracker["large_at_Nyquist"] = False
Tracker["previousoutputdir"] = previousoutputdir
Tracker["refvol"] = os.path.join(iter_dir, "vol_0_%03d.hdf"%selected_iter)
Tracker["mainiteration"] = selected_iter
if shell_line_command: update_tracker(shell_line_command) # the updated could be any refinement parameters that user wish to make change
error_angles, error_shifts = params_changes((new_params_chunk_one + new_params_chunk_two), (new_params_chunk_one_last_iter + new_params_chunk_two_last_iter))
# varibles in Tracker to be updated
if Tracker["constants"]["mask3D"]:
Tracker["constants"]["mask3D"] = os.path.join(option_old_refinement_dir, "../", Tracker["constants"]["mask3D"])
if not os.path.exists(Tracker["constants"]["mask3D"]): Tracker["constants"]["mask3D"] = None
noiseimage.write_image(os.path.join(Tracker["previousoutputdir"], "bckgnoise.hdf"))
noiseimage1.write_image(os.path.join(iter_dir, "bckgnoise.hdf"))
write_text_file(cluster, os.path.join(iter_dir, "indexes_%03d.txt"%selected_iter))
write_text_row(refang, os.path.join(iter_dir, "refang.txt"))
write_text_row(rshifts, os.path.join(iter_dir, "rshifts.txt"))
write_text_row(new_params_chunk_one, os.path.join(iter_dir, "params-chunk_0_%03d.txt"%selected_iter))
write_text_row(new_params_chunk_two, os.path.join(iter_dir, "params-chunk_1_%03d.txt"%selected_iter))
write_text_row(new_params_chunk_one_last_iter, os.path.join(Tracker["previousoutputdir"], "params-chunk_0_%03d.txt"%(selected_iter -1)))
write_text_row(new_params_chunk_two_last_iter, os.path.join(Tracker["previousoutputdir"], "params-chunk_1_%03d.txt"%(selected_iter -1)))
write_text_file(new_chunk_one, os.path.join(iter_dir, "chunk_0_%03d.txt"%selected_iter))
write_text_file(new_chunk_two, os.path.join(iter_dir, "chunk_1_%03d.txt"%selected_iter))
write_text_row(new_params, os.path.join(iter_dir, "params_%03d.txt"%selected_iter))
write_text_row([[error_angles, error_shifts]], os.path.join(iter_dir, "error_thresholds_%03d.txt"%selected_iter))
Tracker["nima_per_chunk"] = [len(new_chunk_one), len(new_chunk_two)]
Tracker["avgvaradj"][0] /=float(len(new_chunk_one))
Tracker["avgvaradj"][1] /=float(len(new_chunk_two))
fout = open(os.path.join(iter_dir, "Tracker_%03d.json"%selected_iter),"w")
json.dump(Tracker, fout)
fout.close()
# now partition new indexes into new oldparamstructure
nproc_dict = {}
for ichunk in xrange(2):
if ichunk == 0: total_stack_on_chunk = len(chunk_one)
else: total_stack_on_chunk = len(chunk_two)
for myproc in xrange(nproc_old_ref3d):
image_start,image_end = MPI_start_end(total_stack_on_chunk, nproc_old_ref3d, myproc)
for index_of_particle in xrange(image_start, image_end):
if ichunk == 0: nproc_dict[chunk_one[index_of_particle]] = [ichunk, myproc, index_of_particle - image_start]
else: nproc_dict[chunk_two[index_of_particle]] = [ichunk, myproc, index_of_particle - image_start]
else: nproc_dict = 0
nproc_dict = wrap_mpi_bcast(nproc_dict, Blockdata["main_node"], comm)
### parse previous nproc in refinement to current nproc
#proc_start, proc_end = MPI_start_end(Blockdata["nsubset"], Blockdata["nsubset"], Blockdata["subgroup_myid"])
for ichunk in xrange(2):
oldparams = []
if ichunk == 0: total_stack_on_chunk = len(new_chunk_one)
else: total_stack_on_chunk = len(new_chunk_two)
image_start,image_end = MPI_start_end(total_stack_on_chunk, Blockdata["nsubset"], Blockdata["subgroup_myid"])
for index_of_particle in xrange(image_start,image_end):
if ichunk == 0: [old_chunk, old_proc, old_index_of_particle] = nproc_dict[new_chunk_one[index_of_particle]]
else: [old_chunk, old_proc, old_index_of_particle] = nproc_dict[new_chunk_two[index_of_particle]]
fout = open(os.path.join(old_oldparamstructure_dir, "oldparamstructure_%d_%03d_%03d.json"%(old_chunk, old_proc, selected_iter)),"r")
old_oldparams = convert_json_fromunicode(json.load(fout))
fout.close()
oldparams.append(old_oldparams[old_index_of_particle])
if Blockdata["subgroup_myid"] == Blockdata["main_node"]:
fout = open(os.path.join(new_oldparamstructure_dir, "oldparamstructure_%d_%03d_%03d.json"%(ichunk, Blockdata["subgroup_myid"], selected_iter)), "w")
json.dump(oldparams, fout)
fout.close()
del oldparams
mpi_barrier(comm)
for iproc in xrange(1, Blockdata["nsubset"]): # always skip main node
if iproc == Blockdata["subgroup_myid"]:wrap_mpi_send(oldparams, Blockdata["main_node"], comm)
if Blockdata["subgroup_myid"] == Blockdata["main_node"]:
dummy = wrap_mpi_recv(iproc, comm)
fout = open(os.path.join(new_oldparamstructure_dir, "oldparamstructure_%d_%03d_%03d.json"%(ichunk, iproc, selected_iter)), "w")
json.dump(dummy, fout)
fout.close()
del dummy
mpi_barrier(comm)
mpi_barrier(comm)
mpi_barrier(comm)
### <<<-------load 0 iteration
selected_iter = 0
old_refinement_iter_dir = os.path.join(option_old_refinement_dir, "main%03d"%selected_iter)
old_oldparamstructure_dir = os.path.join(old_refinement_iter_dir, "oldparamstructure")
iter_dir = os.path.join(masterdir, "main%03d"%selected_iter)
if Blockdata["subgroup_myid"] == Blockdata["main_node"]:
fout = open(os.path.join(old_refinement_iter_dir, "Tracker_%03d.json"%selected_iter),"r")
Tracker = convert_json_fromunicode(json.load(fout))
fout.close()
else: Tracker = 0
Tracker = wrap_mpi_bcast(Tracker, Blockdata["main_node"], comm) # balance processors
Tracker["constants"]["orgstack"] = orgstack
if Blockdata["subgroup_myid"] == Blockdata["main_node"]:
if not os.path.exists(iter_dir):os.mkdir(iter_dir)
mpi_barrier(comm)
if Blockdata["subgroup_myid"] == Blockdata["main_node"]:
params = read_text_row(os.path.join(old_refinement_iter_dir, "params_%03d.txt"%selected_iter))
chunk_one = read_text_file(os.path.join(old_refinement_iter_dir, "chunk_0_%03d.txt"%selected_iter))
chunk_two = read_text_file(os.path.join(old_refinement_iter_dir, "chunk_1_%03d.txt"%selected_iter))
particle_group_one = read_text_file(os.path.join(old_refinement_iter_dir, "particle_groups_0.txt"))
particle_group_two = read_text_file(os.path.join(old_refinement_iter_dir, "particle_groups_1.txt"))
groupids = read_text_file(os.path.join(old_refinement_iter_dir, "groupids.txt"))
else:
groupids = 0
params = 0
refang = 0
rshifts = 0
chunk_one = 0
chunk_two = 0
particle_group_one = 0
particle_group_two = 0
params = wrap_mpi_bcast(params, Blockdata["main_node"], comm)
chunk_one = wrap_mpi_bcast(chunk_one, Blockdata["main_node"], comm)
chunk_two = wrap_mpi_bcast(chunk_two, Blockdata["main_node"], comm)
particle_group_one = wrap_mpi_bcast(particle_group_one, Blockdata["main_node"], comm)
particle_group_two = wrap_mpi_bcast(particle_group_two, Blockdata["main_node"], comm)
groupids = wrap_mpi_bcast(groupids, Blockdata["main_node"], comm)
group_ids_dict = {}
for iptl in xrange(len(particle_group_one)): group_ids_dict[chunk_one[iptl]] = particle_group_one[iptl]
for iptl in xrange(len(particle_group_two)): group_ids_dict[chunk_two[iptl]] = particle_group_two[iptl]
chunk_dict = {}
for a in chunk_one: chunk_dict[a] = 0
for b in chunk_two: chunk_dict[b] = 1
### handle the selected cluster
new_chunk_one = []
new_chunk_two = []
new_params = []
new_params_chunk_one = []
new_params_chunk_two = []
new_particle_group_one = []
new_particle_group_two = []
for index_of_particle in xrange(len(cluster)):
if chunk_dict[cluster[index_of_particle]] == 0:
new_chunk_one.append(cluster[index_of_particle])
new_params_chunk_one.append(params[cluster[index_of_particle]])
else:
new_chunk_two.append(cluster[index_of_particle])
new_params_chunk_two.append(params[cluster[index_of_particle]])
new_params.append(params[cluster[index_of_particle]])
for iptl in xrange(len(new_chunk_one)):new_particle_group_one.append(group_ids_dict[new_chunk_one[iptl]])
for iptl in xrange(len(new_chunk_two)):new_particle_group_two.append(group_ids_dict[new_chunk_two[iptl]])
if Blockdata["subgroup_myid"] == Blockdata["main_node"]:# some numbers and path are required to be modified
# varibles in Tracker to be updated
Tracker["constants"]["masterdir"] = masterdir
Tracker["previousoutputdir"] = Tracker["directory"]
Tracker["refvol"] = os.path.join(iter_dir, "vol_0_%03d.hdf"%selected_iter)
Tracker["mainiteration"] = selected_iter
if Tracker["constants"]["mask3D"]:
Tracker["constants"]["mask3D"]= os.path.join(option_old_refinement_dir, "../", Tracker["constants"]["mask3D"])
if not os.path.exists(Tracker["constants"]["mask3D"]): Tracker["constants"]["mask3D"] = None
write_text_file(cluster, os.path.join(iter_dir, "indexes_%03d.txt"%selected_iter))
write_text_file(groupids, os.path.join(iter_dir, "groupids.txt"))
write_text_row(new_params, os.path.join(iter_dir, "params_%03d.txt"%selected_iter))
write_text_row(new_params_chunk_one, os.path.join(iter_dir, "params-chunk_0_%03d.txt"%selected_iter))
write_text_row(new_params_chunk_two, os.path.join(iter_dir, "params-chunk_1_%03d.txt"%selected_iter))
write_text_file(new_chunk_one, os.path.join(iter_dir, "chunk_0_%03d.txt"%selected_iter))
write_text_file(new_chunk_two, os.path.join(iter_dir, "chunk_1_%03d.txt"%selected_iter))
write_text_file(new_particle_group_one, os.path.join(iter_dir, "particle_groups_0.txt"))
write_text_file(new_particle_group_two, os.path.join(iter_dir, "particle_groups_1.txt"))
Tracker = wrap_mpi_bcast(Tracker, Blockdata["main_node"], comm) # balance processors
Tracker["constants"]["orgstack"] = orgstack
if Blockdata["subgroup_myid"] == Blockdata["main_node"]:# some numbers and path are required to be modified
fout = open(os.path.join(iter_dir, "Tracker_%03d.json"%selected_iter),"w")
json.dump(Tracker, fout)
fout.close()
mpi_barrier(comm)
return
#######functions for ctreffromsorting
def ctrefromsorting_rec3d_faked_iter(masterdir, selected_iter=-1, rec3d_image_size =-1, comm = -1):
global Tracker, Blockdata
import json
#from mpi import mpi_barrier, MPI_COMM_WORLD
if comm ==-1: comm = MPI_COMM_WORLD
if Blockdata["subgroup_myid"]>-1:
Tracker["directory"] = os.path.join(masterdir, "main%03d"%selected_iter)
Tracker["previousoutputdir"] = os.path.join(masterdir, "main%03d"%(selected_iter-1))
oldparamstructure =[[],[]]
newparamstructure =[[],[]]
projdata = [[model_blank(1,1)], [model_blank(1,1)]]
original_data = [None,None]
oldparams = [[],[]]
partids = [None, None]
partstack = [None, None]
final_dir = Tracker["directory"]
if Blockdata["subgroup_myid"] == Blockdata["main_node"]:
fout = open(os.path.join(Tracker["directory"], "Tracker_%03d.json"%selected_iter),"r")
Tracker = convert_json_fromunicode(json.load(fout))
fout.close()
if rec3d_image_size !=-1: Tracker["nxinit"] = rec3d_image_size
else: Tracker = 0
Tracker = wrap_mpi_bcast(Tracker, Blockdata["main_node"], comm) # balance processors
Blockdata["accumulatepw"] = [[],[]]
if selected_iter ==-1: ERROR("Iteration number has to be determined in advance.","ctrefromsorting_rec3d_faked_iter",1, Blockdata["subgroup_myid"])
carryon = 1
if(Blockdata["subgroup_myid"] == Blockdata["main_node"]):
try:
refang = read_text_row( os.path.join(Tracker["directory"], "refang.txt"))
rshifts = read_text_row( os.path.join(Tracker["directory"], "rshifts.txt"))
except:carryon =0
else:
refang = 0
rshifts = 0
carryon = bcast_number_to_all(carryon, source_node = Blockdata["main_node"], mpi_comm = comm)
if carryon == 0: ERROR("Failed to read refang and rshifts: %s %s "%(os.path.join(Tracker["directory"], "refang.txt"), os.path.join(Tracker["directory"], \
"rshifts.txt")), "ctrefromsorting_rec3d_faked_iter", 1,Blockdata["subgroup_myid"])
refang = wrap_mpi_bcast(refang, Blockdata["main_node"], comm)
rshifts = wrap_mpi_bcast(rshifts, Blockdata["main_node"], comm)
partids =[None, None]
if(Blockdata["subgroup_myid"] == Blockdata["main_node"]):
if not os.path.exists(os.path.join(Tracker["directory"], "tempdir")): os.mkdir (os.path.join(Tracker["directory"], "tempdir"))
l = 0
for procid in xrange(2):
partids[procid] = os.path.join(Tracker["directory"],"chunk_%01d_%03d.txt"%(procid,Tracker["mainiteration"]))
l += len(read_text_file(partids[procid]))
else:l = 0
l = bcast_number_to_all(l, source_node = Blockdata["main_node"], mpi_comm = comm)
norm_per_particle = [[],[]]
for procid in xrange(2):
if procid ==0: original_data[1] = None
partids[procid] = os.path.join(Tracker["directory"],"chunk_%01d_%03d.txt"%(procid,Tracker["mainiteration"]))
partstack[procid] = os.path.join(Tracker["constants"]["masterdir"],"main%03d"%(Tracker["mainiteration"]-1),"params-chunk_%01d_%03d.txt"%(procid,(Tracker["mainiteration"]-1)))
if Blockdata["subgroup_myid"] == Blockdata["main_node"]:
nproc_previous = 0
while os.path.exists(os.path.join(Tracker["directory"],"oldparamstructure","oldparamstructure_%01d_%03d_%03d.json"%(procid, nproc_previous, Tracker["mainiteration"]))):nproc_previous += 1
psize = len(read_text_file(partids[procid]))
for jproc in xrange(Blockdata["nsubset"]):
if Blockdata["subgroup_myid"] == Blockdata["main_node"]:
dummy = []
im_start, im_end = MPI_start_end(psize, Blockdata["nsubset"], jproc)
istart_old_proc_id = -1
iend_old_proc_id = -1
plist = []
for iproc_old in xrange(nproc_previous):
im_start_old, im_end_old = MPI_start_end(psize, nproc_previous, iproc_old)
if (im_start>= im_start_old) and im_start <=im_end_old: istart_old_proc_id = iproc_old
if (im_end>= im_start_old) and im_end <=im_end_old: iend_old_proc_id = iproc_old
plist.append([im_start_old, im_end_old])
ptl_on_this_cpu = im_start
for iproc_index_old in xrange(istart_old_proc_id, iend_old_proc_id+1):
fout = open(os.path.join(final_dir,"oldparamstructure","oldparamstructure_%01d_%03d_%03d.json"%(procid,iproc_index_old,Tracker["mainiteration"])),'r')
oldparamstructure_on_old_cpu = convert_json_fromunicode(json.load(fout))
fout.close()
mlocal_id_on_old = ptl_on_this_cpu - plist[iproc_index_old][0]
while (mlocal_id_on_old 0.0 ): tmp1[k] = 2.0/qt
#smooth
tmp1[0] = tmp1[1]
tmp1[-1] = tmp1[-2]
for ism in xrange(0): #2
for k in xrange(1,nv-1): tmp2[k] = (tmp1[k-1]+tmp1[k]+tmp1[k+1])/3.0
for k in xrange(1,nv-1): tmp1[k] = tmp2[k]
# We will keep 0-element the same as first tsd.set_value_at(0,i,1.0)
for k in xrange(1,nv):tsd.set_value_at(k,i,tmp1[k])
tsd.set_value_at(0,i,1.0)
tsd.write_image(os.path.join(Tracker["directory"],"bckgnoise.hdf"))
bcast_EMData_to_all(tsd, myid, source_node = 0, comm = mpi_comm)
nnx = tsd.get_xsize()
nny = tsd.get_ysize()
Blockdata["bckgnoise"] = []
for i in xrange(nny):
prj = model_blank(nnx)
for k in xrange(nnx): prj[k] = tsd.get_value_at(k,i)
Blockdata["bckgnoise"].append(prj) # 1.0/sigma^2
return
###
def do3d(procid, data, newparams, refang, rshifts, norm_per_particle, myid, mpi_comm = -1):
	"""Reconstruct one unfiltered half-set volume for chunk `procid`.

	Runs recons3d_trl_struct_MPI over the distributed projections `data` using
	the smearing information (`newparams`, `refang`, `rshifts`), then the main
	node of the subgroup writes the raw Fourier volume, its weights and the
	regularization map into Tracker["directory"]/tempdir as
	tvol_/tweight_/trol_%01d_%03d.hdf.  No filtration is applied here.

	procid            : chunk id (0 or 1); chunk 0 also creates tempdir
	data              : per-rank list of projection images
	newparams         : per-particle orientation/smearing parameter structure
	refang, rshifts   : reference angles and shift grid from refinement
	norm_per_particle : per-particle normalization factors
	myid              : caller's rank (kept for interface compatibility; the
	                    reconstruction uses Blockdata["subgroup_myid"] instead)
	mpi_comm          : MPI communicator; -1 selects MPI_COMM_WORLD
	"""
	global Tracker, Blockdata
	# Without filtration
	from reconstruction import recons3d_trl_struct_MPI
	# BUGFIX: the original guard was "if( mpi_comm < -1 )", which is never true
	# for the -1 default, and it assigned the misspelled name MPI_COMM_WORDLD
	# (NameError if ever reached) -- the default communicator was never set.
	if( mpi_comm == -1 ): mpi_comm = MPI_COMM_WORLD
	if Blockdata["subgroup_myid"]== Blockdata["main_node"]:
		if( procid == 0 ):
			# lazily create the shared scratch directory for the raw maps
			if not os.path.exists(os.path.join(Tracker["directory"], "tempdir")): os.mkdir(os.path.join(Tracker["directory"], "tempdir"))
	# shifts are rescaled by the current shrinkage factor before insertion
	shrinkage = float(Tracker["nxinit"])/float(Tracker["constants"]["nnxo"])
	tvol, tweight, trol = recons3d_trl_struct_MPI(myid = Blockdata["subgroup_myid"], main_node = Blockdata["main_node"], prjlist = data, \
			paramstructure = newparams, refang = refang, rshifts_shrank = [[q[0]*shrinkage,q[1]*shrinkage] for q in rshifts], \
			delta = Tracker["delta"], CTF = Tracker["constants"]["CTF"], upweighted = False, mpi_comm = mpi_comm, \
			target_size = (2*Tracker["nxinit"]+3), avgnorm = Tracker["avgvaradj"][procid], norm_per_particle = norm_per_particle)
	if Blockdata["subgroup_myid"] == Blockdata["main_node"]:
		tvol.set_attr("is_complex",0)  # stored as a plain image; complex flag is restored on reload
		tvol.write_image(os.path.join(Tracker["directory"], "tempdir", "tvol_%01d_%03d.hdf"%(procid,Tracker["mainiteration"])))
		tweight.write_image(os.path.join(Tracker["directory"], "tempdir", "tweight_%01d_%03d.hdf"%(procid,Tracker["mainiteration"])))
		trol.write_image(os.path.join(Tracker["directory"], "tempdir", "trol_%01d_%03d.hdf"%(procid,Tracker["mainiteration"])))
	mpi_barrier(mpi_comm)
	return
##
def getindexdata(partids, partstack, particle_groups, original_data = None, small_memory= True, nproc =-1, myid = -1, mpi_comm = -1):
	"""Read the subset of images listed in `partids` and their parameters.

	Rank 0 parses the three text files (parameter rows, particle IDs, group
	assignments) and broadcasts them; every rank then keeps only its own
	MPI_start_end slice.  Images are (re)loaded from the original stack unless
	a cached list is supplied and small_memory is off; each loaded image gets
	its group id attached as the "particle_group" attribute.

	Returns (image_list, parameter_rows), both restricted to this rank.
	"""
	global Tracker, Blockdata
	from applications import MPI_start_end
	if( mpi_comm < 0 ): mpi_comm = MPI_COMM_WORLD

	def _read_and_bcast(reader, path):
		# rank 0 parses the file; all other ranks receive the broadcast copy
		payload = reader(path) if myid == 0 else 0
		return wrap_mpi_bcast(payload, 0, mpi_comm)

	partstack       = _read_and_bcast(read_text_row,  partstack)        # per-particle parameters
	partids         = _read_and_bcast(read_text_file, partids)          # particle IDs
	group_reference = _read_and_bcast(read_text_file, particle_groups)  # group assignments
	# partids and partstack have equal length; keep this rank's share only
	istart, iend    = MPI_start_end(len(partstack), nproc, myid)
	partstack       = partstack[istart:iend]
	partids         = partids[istart:iend]
	group_reference = group_reference[istart:iend]
	if original_data is None or small_memory:
		original_data = EMData.read_images(Tracker["constants"]["orgstack"], partids)
		for k in xrange( len(original_data) ):
			original_data[k].set_attr("particle_group", group_reference[k])
	return original_data, partstack
#######
def do3d_sorting_groups_rec3d(iteration, masterdir, log_main):
global Tracker, Blockdata
from utilities import get_im
# reconstruct final unfiltered volumes from sorted clusters
keepgoing = 1
#if(Blockdata["myid"] == Blockdata["nodes"][0]):
# cmd = "{} {}".format("mkdir", os.path.join(Tracker["directory"], "tempdir"))
# if os.path.exists(os.path.join(Tracker["directory"], "tempdir")): print("tempdir exists")
# else: cmdexecute(cmd)
### ====
fsc143 = 0
fsc05 = 0
Tracker["fsc143"] = 0
Tracker["fsc05"] = 0
res_05 = Tracker["number_of_groups"]*[0]
res_143 = Tracker["number_of_groups"]*[0]
Tracker["directory"] = masterdir
Tracker["constants"]["masterdir"] = masterdir
for index_of_colors in xrange(Blockdata["no_of_groups"]):
group_start, group_end = MPI_volume_start_end(Tracker["number_of_groups"], Blockdata["no_of_groups"], index_of_colors)
if Blockdata["color"] == index_of_colors: # It has to be 1 to avoid problem with tvol1 not closed on the disk
for index_of_group in xrange(group_start, group_end):
Clusterdir = os.path.join(Tracker["directory"], "Cluster%d"%index_of_group, "main%03d"%iteration)
cfsc = 0
if Blockdata["myid_on_node"] == 0:
tvol0 = get_im(os.path.join(Clusterdir, "tempdir", "tvol_0_%03d.hdf")%iteration)
tweight0 = get_im(os.path.join(Clusterdir, "tempdir", "tweight_0_%03d.hdf")%iteration)
tvol1 = get_im(os.path.join(Clusterdir, "tempdir", "tvol_1_%03d.hdf")%iteration)
tweight1 = get_im(os.path.join(Clusterdir, "tempdir", "tweight_1_%03d.hdf")%iteration)
Util.fuse_low_freq(tvol0, tvol1, tweight0, tweight1, 2*Tracker["fuse_freq"])
tag = 7007
send_EMData(tvol1, Blockdata["no_of_processes_per_group"]-1, tag, Blockdata["shared_comm"])
send_EMData(tweight1, Blockdata["no_of_processes_per_group"]-1, tag, Blockdata["shared_comm"])
tvol0.set_attr_dict( {"is_complex":1, "is_fftodd":1, 'is_complex_ri': 1, 'is_fftpad': 1} )
shrank0 = stepone(tvol0, tweight0)
elif Blockdata["myid_on_node"] == Blockdata["no_of_processes_per_group"]-1:
tag = 7007
tvol1 = recv_EMData(0, tag, Blockdata["shared_comm"])
tweight1 = recv_EMData(0, tag, Blockdata["shared_comm"])
tvol1.set_attr_dict( {"is_complex":1, "is_fftodd":1, 'is_complex_ri': 1, 'is_fftpad': 1} )
shrank1 = stepone(tvol1, tweight1)
treg1 = get_im(os.path.join(Clusterdir, "tempdir", "trol_1_%03d.hdf"%iteration))
mpi_barrier(Blockdata["shared_comm"])
if Blockdata["myid_on_node"] == 0:
tag = 7007
send_EMData(shrank0, Blockdata["no_of_processes_per_group"]-1, tag, Blockdata["shared_comm"])
del shrank0
lcfsc = 0
elif Blockdata["myid_on_node"] == Blockdata["no_of_processes_per_group"]-1:
tag = 7007
shrank0 = recv_EMData(0, tag, Blockdata["shared_comm"])
# Note shrank volumes are Fourier uncentered.
cfsc = fsc(shrank0, shrank1)[1]
write_text_row(cfsc, os.path.join(Tracker["directory"], "fsc_driver_grp%03d_iter%03d.txt")%(index_of_group,iteration))
del shrank0, shrank1
if(Tracker["nxinit"]"
msg = "command for sharpening maps: sxprocess.py map0.hdf map1.hdf --postprocess --mask=%s --fsc_adj --pixel_size=%f"%(Tracker["constants"]["mask3D"], Tracker["constants"]["pixel_size"])
print(line, msg)
log_main.add(msg)
if not keepgoing: ERROR("do3d_sorting_groups_trl_iter %s"%os.path.join(Tracker["directory"], "tempdir"),"do3d_sorting_groups_trl_iter", 1, Blockdata["myid"])
return
####<<<--------
### nofsc rec3d
def do3d_sorting_groups_nofsc_smearing_iter(srdata, partial_rec3d, iteration):
	# Reconstruct one volume per sorting group from the shifted particle data
	# `srdata`, without computing an FSC curve (cfsc is all zeros).
	# Phase 1: every rank inserts its particles into per-group backprojection
	#          buffers (tvol/tweight/trol), written to tempdir by the main node.
	# Phase 2: the buffers are normalized into real-space volumes (steptwo /
	#          steptwo_mpi), masked, and written as vol_grp%03d_iter%03d.hdf.
	#
	# srdata        : per-rank list of shifted projections with group assignments
	# partial_rec3d : True -> incrementally update the tvol/tweight/trol files
	#                 already on disk (re-assigned particles only);
	#                 False -> rebuild the buffers from scratch
	# iteration     : sorting iteration number, used only in output file names
	global Tracker, Blockdata
	keepgoing = 1
	if(Blockdata["myid"] == Blockdata["main_node"]):
		# scratch directory shared by all groups' intermediate maps
		if not os.path.exists(os.path.join(Tracker["directory"], "tempdir")):os.mkdir(os.path.join(Tracker["directory"], "tempdir"))
	mpi_barrier(MPI_COMM_WORLD)
	### Phase 1: backprojection, one group at a time over the whole communicator
	for index_of_groups in xrange(Tracker["number_of_groups"]):
		if partial_rec3d:
			# partial rec3d starts from the trol/tvol/tweight files of the previous pass
			tvol, tweight, trol = recons3d_trl_struct_group_nofsc_shifted_data_partial_MPI(Blockdata["myid"], Blockdata["main_node"], Blockdata["nproc"], srdata, index_of_groups, \
				os.path.join(Tracker["directory"], "tempdir", "trol_2_%d.hdf"%index_of_groups), \
				os.path.join(Tracker["directory"], "tempdir", "tvol_2_%d.hdf"%index_of_groups), \
				os.path.join(Tracker["directory"], "tempdir", "tweight_2_%d.hdf"%index_of_groups),\
				None, Tracker["constants"]["CTF"], (2*Tracker["nxinit"]+3), Tracker["nosmearing"])
		else:
			tvol, tweight, trol = recons3d_trl_struct_group_nofsc_shifted_data_MPI(Blockdata["myid"], Blockdata["main_node"], srdata,\
				index_of_groups, None, Tracker["constants"]["CTF"], (2*Tracker["nxinit"]+3), Tracker["nosmearing"])
		if(Blockdata["myid"] == Blockdata["main_node"]):
			tvol.set_attr("is_complex",0)  # stored as a plain image; complex flag restored in phase 2
			tvol.write_image(os.path.join(Tracker["directory"], "tempdir", "tvol_2_%d.hdf"%index_of_groups))
			tweight.write_image(os.path.join(Tracker["directory"], "tempdir", "tweight_2_%d.hdf"%index_of_groups))
			trol.write_image(os.path.join(Tracker["directory"], "tempdir", "trol_2_%d.hdf"%index_of_groups))
			del tvol
			del tweight
			del trol
			"""
			if partial_rec3d:
				tvol3.set_attr("is_complex",0)
				tvol3.write_image(os.path.join(Tracker["directory"], "tempdir", "tvol_3_%d.hdf"%index_of_groups))
				tweight3.write_image(os.path.join(Tracker["directory"], "tempdir", "tweight_3_%d.hdf"%index_of_groups))
				trol3.write_image(os.path.join(Tracker["directory"], "tempdir", "trol_3_%d.hdf"%index_of_groups))
			"""
		mpi_barrier(MPI_COMM_WORLD)
	mpi_barrier(MPI_COMM_WORLD)
	#fsc143 = 0
	#fsc05 = 0
	# No FSC is computed in this variant; resolution markers are set to Nyquist.
	Tracker["fsc143"] = 0
	Tracker["fsc05"] = 0
	Tracker["maxfrad"]= Tracker["nxinit"]//2
	### Phase 2: normalization of the buffers into volumes
	if Blockdata["no_of_groups"]>1:
		# multi-node run: groups are distributed over node colors
		for index_of_colors in xrange(Blockdata["no_of_groups"]):
			group_start, group_end = MPI_volume_start_end(Tracker["number_of_groups"], Blockdata["no_of_groups"], index_of_colors)
			if Blockdata["color"] == index_of_colors: # It has to be 1 to avoid problem with tvol1 not closed on the disk
				for index_of_group in xrange(group_start, group_end):
					Tracker["fsc143"] = Tracker["nxinit"]//2
					Tracker["fsc05"] = Tracker["nxinit"]//2
					# flat zero "FSC" curve: steptwo(_mpi) applies no FSC-based regularization
					cfsc = [0.0 for i in xrange(Tracker["constants"]["nnxo"])]
					if Blockdata["fftwmpi"]:
						# distributed (fftw-MPI) normalization over the node group
						if(Blockdata["myid_on_node"] == 0):
							tvol2 = get_im(os.path.join(Tracker["directory"], "tempdir", "tvol_2_%d.hdf")%index_of_group)
							tweight2 = get_im(os.path.join(Tracker["directory"], "tempdir", "tweight_2_%d.hdf")%index_of_group)
							treg2 = get_im(os.path.join(Tracker["directory"], "tempdir", "trol_2_%d.hdf"%index_of_group))
						else:
							# non-root ranks pass placeholder images to the collective call
							tvol2 = model_blank(1)
							tweight2 = model_blank(1)
							treg2 = model_blank(1)
						tvol2 = steptwo_mpi(tvol2, tweight2, treg2, cfsc, False, color = index_of_colors) # has to be False!!!
						del tweight2, treg2
						if(Blockdata["myid_on_node"] == 0):
							# apply 3D mask (or a cosine-edged spherical fallback) and write the result
							if(Tracker["mask3D"] == None):tvol2 = cosinemask(tvol2, radius = Tracker["constants"]["radius"])
							else: Util.mul_img(tvol2, get_im(Tracker["constants"]["mask3D"]))
							tvol2.write_image(os.path.join(Tracker["directory"], "vol_grp%03d_iter%03d.hdf"%(index_of_group,iteration)))
							del tvol2
					else:
						# single-process normalization on the node root only
						if(Blockdata["myid_on_node"] == 0):
							tvol2 = get_im(os.path.join(Tracker["directory"], "tempdir", "tvol_2_%d.hdf")%index_of_group)
							tweight2 = get_im(os.path.join(Tracker["directory"], "tempdir", "tweight_2_%d.hdf")%index_of_group)
							treg2 = get_im(os.path.join(Tracker["directory"], "tempdir", "trol_2_%d.hdf"%index_of_group))
							tvol2 = steptwo(tvol2, tweight2, treg2, cfsc, False)
							del tweight2, treg2
							if(Tracker["mask3D"] == None): tvol2 = cosinemask(tvol2, radius = Tracker["constants"]["radius"])
							else: Util.mul_img(tvol2, get_im(Tracker["constants"]["mask3D"]))
							tvol2.write_image(os.path.join(Tracker["directory"], "vol_grp%03d_iter%03d.hdf"%(index_of_group,iteration)))
							del tvol2
					mpi_barrier(Blockdata["shared_comm"])
					######
					"""
					if partial_rec3d:
						if Blockdata["fftwmpi"]:
							if(Blockdata["myid_on_node"] == 0):
								tvol2 = get_im(os.path.join(Tracker["directory"], "tempdir", "tvol_3_%d.hdf")%index_of_group)
								tweight2 = get_im(os.path.join(Tracker["directory"], "tempdir", "tweight_3_%d.hdf")%index_of_group)
								treg2 = get_im(os.path.join(Tracker["directory"], "tempdir", "trol_3_%d.hdf"%index_of_group))
							else:
								tvol2 = model_blank(1)
								tweight2 = model_blank(1)
								treg2 = model_blank(1)
							tvol2 = steptwo_mpi(tvol2, tweight2, treg2, cfsc, False, color = index_of_colors) # has to be False!!!
							del tweight2, treg2
							if(Blockdata["myid_on_node"] == 0):
								if(Tracker["mask3D"] == None):tvol2 = cosinemask(tvol2, radius = Tracker["constants"]["radius"])
								else: Util.mul_img(tvol2, get_im(Tracker["constants"]["mask3D"]))
								tvol2.write_image(os.path.join(Tracker["directory"], "vol_grp%03d_iter%03d.hdf"%(index_of_group,iteration)))
								del tvol2
						else:
							if(Blockdata["myid_on_node"] == 0):
								tvol2 = get_im(os.path.join(Tracker["directory"], "tempdir", "tvol_3_%d.hdf")%index_of_group)
								tweight2 = get_im(os.path.join(Tracker["directory"], "tempdir", "tweight_3_%d.hdf")%index_of_group)
								treg2 = get_im(os.path.join(Tracker["directory"], "tempdir", "trol_3_%d.hdf"%index_of_group))
								tvol2 = steptwo(tvol2, tweight2, treg2, cfsc, False)
								del tweight2, treg2
								if(Tracker["mask3D"] == None):tvol2 = cosinemask(tvol2, radius = Tracker["constants"]["radius"])
								else: Util.mul_img(tvol2, get_im(Tracker["constants"]["mask3D"]))
								tvol2.write_image(os.path.join(Tracker["directory"], "vol_3_grp%03d_iter%03d.hdf"%(index_of_group,iteration)))
								del tvol2
					"""
				mpi_barrier(Blockdata["shared_comm"])
			mpi_barrier(Blockdata["shared_comm"])
	else:# loop over all groups for single node
		for index_of_group in xrange(Tracker["number_of_groups"]):
			Tracker["fsc143"] = Tracker["nxinit"]//2
			Tracker["fsc05"] = Tracker["nxinit"]//2
			# flat zero "FSC" curve: steptwo(_mpi) applies no FSC-based regularization
			cfsc = [0.0 for i in xrange(Tracker["constants"]["nnxo"])]
			if Blockdata["fftwmpi"]:
				if(Blockdata["myid_on_node"] == 0):
					tvol2 = get_im(os.path.join(Tracker["directory"], "tempdir", "tvol_2_%d.hdf")%index_of_group)
					tweight2 = get_im(os.path.join(Tracker["directory"], "tempdir", "tweight_2_%d.hdf")%index_of_group)
					treg2 = get_im(os.path.join(Tracker["directory"], "tempdir", "trol_2_%d.hdf"%index_of_group))
					# restore Fourier flags on the buffer read back from disk
					tvol2.set_attr_dict( {"is_complex":1, "is_fftodd":1, 'is_complex_ri': 1, 'is_fftpad': 1})
				else:
					# non-root ranks pass placeholder images to the collective call
					tvol2 = model_blank(1)
					tweight2 = model_blank(1)
					treg2 = model_blank(1)
				tvol2 = steptwo_mpi(tvol2, tweight2, treg2, cfsc, False, color = Blockdata["node_volume"][0]) # has to be False!!!
				del tweight2, treg2
				if(Blockdata["myid_on_node"] == 0):
					if(Tracker["mask3D"] == None):tvol2 = cosinemask(tvol2, radius = Tracker["constants"]["radius"])
					else: Util.mul_img(tvol2, get_im(Tracker["constants"]["mask3D"]))
					tvol2.write_image(os.path.join(Tracker["directory"], "vol_grp%03d_iter%03d.hdf"%(index_of_group,iteration)))
					del tvol2
			else:# to be paralleled
				if(Blockdata["myid_on_node"] == 0):
					tvol2 = get_im(os.path.join(Tracker["directory"], "tempdir", "tvol_2_%d.hdf")%index_of_group)
					tweight2 = get_im(os.path.join(Tracker["directory"], "tempdir", "tweight_2_%d.hdf")%index_of_group)
					treg2 = get_im(os.path.join(Tracker["directory"], "tempdir", "trol_2_%d.hdf"%index_of_group))
					# restore Fourier flags on the buffer read back from disk
					tvol2.set_attr_dict( {"is_complex":1, "is_fftodd":1, 'is_complex_ri': 1, 'is_fftpad': 1})
					tvol2 = steptwo(tvol2, tweight2, treg2, cfsc, False)
					del tweight2, treg2
					if(Tracker["mask3D"] == None):tvol2 = cosinemask(tvol2, radius = Tracker["constants"]["radius"])
					else: Util.mul_img(tvol2, get_im(Tracker["constants"]["mask3D"]))
					tvol2.write_image(os.path.join(Tracker["directory"], "vol_grp%03d_iter%03d.hdf"%(index_of_group,iteration)))
					del tvol2
			mpi_barrier(MPI_COMM_WORLD)
	mpi_barrier(MPI_COMM_WORLD)
	keepgoing = bcast_number_to_all(keepgoing, source_node = Blockdata["main_node"], mpi_comm = MPI_COMM_WORLD) # always check
	Tracker = wrap_mpi_bcast(Tracker, Blockdata["main_node"])
	if not keepgoing: ERROR("do3d_sorting_groups_trl_iter %s"%os.path.join(Tracker["directory"], "tempdir"),"do3d_sorting_groups_trl_iter", 1, Blockdata["myid"])
	return
### nofsc insertion #1
def recons3d_trl_struct_group_nofsc_shifted_data_partial_MPI(myid, main_node, nproc, prjlist, group_ID, refvol_file, fftvol_file, weight_file, mpi_comm= None, CTF = True, target_size=-1, nosmearing = False):
    """
    Partial 3-D reconstruction (reconstructor "nn4_ctfws") for re-assigned particles.

    Starting from an existing fftvol/weight pair read from disk, particles whose
    group assignment changed are inserted with flag +1.0 into their new group
    and with flag -1.0 to subtract them from their previous group, so the
    volume of group_ID is updated incrementally instead of rebuilt from scratch.

    myid, main_node, nproc : MPI rank, master rank and number of processes
    prjlist     : list of projection images (nosmearing=True) or list of lists
                  of smeared copies, each carrying a "wprob" weight attribute
    group_ID    : id of the group whose volume is being updated
    refvol_file, fftvol_file, weight_file : input files; all must exist
    mpi_comm    : communicator (defaults to MPI_COMM_WORLD)
    CTF         : enable CTF weighting in the reconstructor
    target_size : overwritten below with the reference volume size
    Returns (fftvol, weight, refvol) on main_node; (None, None, None) elsewhere.
    """
    from utilities import reduce_EMData_to_root, random_string, get_im, findall, info, model_blank
    from EMAN2 import Reconstructors
    from filter import filt_table
    from fundamentals import fshift
    from mpi import MPI_COMM_WORLD, mpi_barrier
    import types
    import datetime
    if mpi_comm == None: mpi_comm = MPI_COMM_WORLD
    if CTF: do_ctf = 1
    else: do_ctf = 0
    # All three input volumes must already be on disk; abort otherwise.
    if not os.path.exists(refvol_file):ERROR("refvol does not exist", "recons3d_trl_struct_group_nofsc_shifted_data_partial_MPI", 1, myid)
    if not os.path.exists(fftvol_file):ERROR("fftvol does not exist", "recons3d_trl_struct_group_nofsc_shifted_data_partial_MPI", 1, myid)
    if not os.path.exists(weight_file):ERROR("weight does not exist", "recons3d_trl_struct_group_nofsc_shifted_data_partial_MPI", 1, myid)
    #refvol
    # Only the master reads the reference volume size; broadcast it to all ranks.
    if myid == main_node: target_size = get_im(refvol_file).get_xsize()
    else: target_size = 0
    target_size = bcast_number_to_all(target_size, main_node, mpi_comm)
    refvol = model_blank(target_size)# set to zero
    # fftvol
    # The master loads fftvol and pre-divides by nproc so that the later
    # reduce (sum over all ranks) restores the original scale.
    if (myid == main_node):
        fftvol = get_im(fftvol_file)
        fftvol /=float(nproc)
        #Util.mult_scalar(fftvol, 1./float(Blockdata["nproc"]))
        nxfft =fftvol.get_xsize()
        nyfft =fftvol.get_ysize()
        nzfft =fftvol.get_zsize()
    else:
        nxfft = 0
        nyfft = 0
        nzfft = 0
    nxfft = bcast_number_to_all(nxfft, main_node, mpi_comm)
    nyfft = bcast_number_to_all(nyfft, main_node, mpi_comm)
    nzfft = bcast_number_to_all(nzfft, main_node, mpi_comm)
    if (myid!= main_node): fftvol = model_blank(nxfft, nyfft, nzfft)
    bcast_EMData_to_all(fftvol, myid, main_node)
    # weight: same load / rescale / broadcast pattern as fftvol above.
    if (myid == main_node):
        weight = get_im(weight_file)
        weight /=float(nproc)
        #Util.mult_scalar(weight, 1./float(Blockdata["nproc"]))
        nxweight = weight.get_xsize()
        nyweight = weight.get_ysize()
        nzweight = weight.get_zsize()
    else:
        nxweight = 0
        nyweight = 0
        nzweight = 0
    nxweight = bcast_number_to_all(nxweight, main_node, mpi_comm)
    nyweight = bcast_number_to_all(nyweight, main_node, mpi_comm)
    nzweight = bcast_number_to_all(nzweight, main_node, mpi_comm)
    if(myid != main_node): weight = model_blank(nxweight, nyweight, nzweight)
    bcast_EMData_to_all(weight, myid, main_node)
    params = {"size":target_size, "npad":2, "snr":1.0, "sign":1, "symmetry":"c1", "refvol":refvol, "fftvol":fftvol, "weight":weight, "do_ctf": do_ctf}
    r = Reconstructors.get("nn4_ctfws", params)
    r.setup()
    # nnx/nny are probed but not used below; kept for parity with the
    # sibling reconstruction routines in this file.
    if nosmearing:
        nnx = prjlist[0].get_xsize()
        nny = prjlist[0].get_ysize()
    else:
        nnx = prjlist[0][0].get_xsize()
        nny = prjlist[0][0].get_ysize()
    for im in xrange(len(prjlist)):
        if nosmearing:
            current_group_ID = prjlist[im].get_attr("group")
            previous_group_ID = prjlist[im].get_attr("previous_group")
            if current_group_ID !=previous_group_ID:
                # flag +1 adds the particle to its new group, flag -1 removes it
                # from the old one; a particle may trigger either insert (not both,
                # since only one of the two ids can equal group_ID here).
                if current_group_ID == group_ID:
                    flag = 1.0
                    [phi, theta, psi, s2x, s2y] = get_params_proj(prjlist[im], xform = "xform.projection")
                    r.insert_slice(prjlist[im], Transform({"type":"spider","phi":phi,"theta":theta,"psi":psi}), flag)
                if previous_group_ID == group_ID:
                    flag = -1.0
                    [phi, theta, psi, s2x, s2y] = get_params_proj(prjlist[im], xform = "xform.projection")
                    r.insert_slice(prjlist[im], Transform({"type":"spider","phi":phi,"theta":theta,"psi":psi}), flag)
        else:
            current_group_ID = prjlist[im][0].get_attr("group")
            previous_group_ID = prjlist[im][0].get_attr("previous_group")
            if current_group_ID !=previous_group_ID:
                if current_group_ID == group_ID:
                    flag = 1.0
                    for jm in xrange(len(prjlist[im])):
                        # each smeared copy is weighted by its probability ("wprob")
                        [phi, theta, psi, s2x, s2y] = get_params_proj(prjlist[im][jm], xform = "xform.projection")
                        r.insert_slice(prjlist[im][jm], Transform({"type":"spider","phi":phi,"theta":theta,"psi":psi}), prjlist[im][jm].get_attr("wprob")*flag)
                if previous_group_ID == group_ID:
                    flag =-1.0
                    for jm in xrange(len(prjlist[im])):
                        [phi, theta, psi, s2x, s2y] = get_params_proj(prjlist[im][jm], xform = "xform.projection")
                        r.insert_slice(prjlist[im][jm], Transform({"type":"spider","phi":phi,"theta":theta,"psi":psi}), prjlist[im][jm].get_attr("wprob")*flag)
    # Sum the partial volumes onto main_node and finalize there.
    reduce_EMData_to_root(fftvol, myid, main_node, comm=mpi_comm)
    reduce_EMData_to_root(weight, myid, main_node, comm=mpi_comm)
    if myid == main_node:dummy = r.finish(True)
    mpi_barrier(mpi_comm)
    if myid == main_node: return fftvol, weight, refvol
    else:
        del fftvol
        del weight
        del refvol
        return None, None, None
### insertion 2
def recons3d_trl_struct_group_nofsc_shifted_data_MPI(myid, main_node, prjlist, group_ID, mpi_comm= None, CTF = True, target_size=-1, nosmearing = False):
    """
    3-D reconstruction (reconstructor "nn4_ctfw") from a pre-shifted data list.

    Only particles whose "group" attribute equals group_ID are inserted.
    prjlist holds single projection images (nosmearing=True) or lists of
    smeared copies, each weighted by its "wprob" attribute.

    myid, main_node : MPI rank and master rank
    mpi_comm        : communicator (defaults to MPI_COMM_WORLD)
    CTF             : enable CTF weighting in the reconstructor
    target_size     : reconstruction volume size passed to the reconstructor
    Returns (fftvol, weight, refvol) on main_node; (None, None, None) elsewhere.
    """
    from utilities import reduce_EMData_to_root, random_string, get_im, findall, info, model_blank
    from EMAN2 import Reconstructors
    from filter import filt_table
    from fundamentals import fshift
    from mpi import MPI_COMM_WORLD, mpi_barrier
    import types
    import datetime
    if mpi_comm == None: mpi_comm = MPI_COMM_WORLD
    refvol = model_blank(target_size)
    refvol.set_attr("fudge", 1.0)
    if CTF: do_ctf = 1
    else: do_ctf = 0
    # Empty headers; the reconstructor allocates and fills them in setup().
    fftvol = EMData()
    weight = EMData()
    params = {"size":target_size, "npad":2, "snr":1.0, "sign":1, "symmetry":"c1", "refvol":refvol, "fftvol":fftvol, "weight":weight, "do_ctf": do_ctf}
    r = Reconstructors.get( "nn4_ctfw", params)
    r.setup()
    # nnx/nny are probed but not used below; kept for parity with the
    # sibling reconstruction routines in this file.
    if nosmearing:
        nnx = prjlist[0].get_xsize()
        nny = prjlist[0].get_ysize()
    else:
        nnx = prjlist[0][0].get_xsize()
        nny = prjlist[0][0].get_ysize()
    for im in xrange(len(prjlist)):
        if nosmearing:
            if prjlist[im].get_attr("group") == group_ID:
                [phi, theta, psi, s2x, s2y] = get_params_proj(prjlist[im], xform = "xform.projection")
                r.insert_slice(prjlist[im], Transform({"type":"spider","phi":phi,"theta":theta,"psi":psi}), 1.0)
        else:
            if prjlist[im][0].get_attr("group") == group_ID:
                for jm in xrange(len(prjlist[im])):
                    # each smeared copy is weighted by its probability ("wprob")
                    [phi, theta, psi, s2x, s2y] = get_params_proj(prjlist[im][jm], xform = "xform.projection")
                    r.insert_slice(prjlist[im][jm], Transform({"type":"spider","phi":phi,"theta":theta,"psi":psi}), prjlist[im][jm].get_attr("wprob"))
    # Sum the partial volumes onto main_node and finalize there.
    reduce_EMData_to_root(fftvol, myid, main_node, comm=mpi_comm)
    reduce_EMData_to_root(weight, myid, main_node, comm=mpi_comm)
    if myid == main_node:dummy = r.finish(True)
    mpi_barrier(mpi_comm)
    if myid == main_node: return fftvol, weight, refvol
    else:
        del fftvol
        del weight
        del refvol
        return None, None, None
###end of nofsc
def recons3d_trl_struct_group_MPI(myid, main_node, prjlist, random_subset, group_ID, paramstructure, norm_per_particle = None, \
        upweighted = True, mpi_comm= None, CTF = True, target_size=-1, nosmearing = False):
    """
    Calculate a CTF-corrected 3-D reconstruction (reconstructor "nn4_ctfw")
    from a set of projections, using three Eulerian angles, two shifts, and
    CTF settings for each projection image.

    prjlist          : list of projections to be included in the reconstruction
    random_subset    : 2 selects all particles of the group; otherwise only
                       particles whose "chunk_id" attribute equals random_subset
    group_ID         : only particles whose "group" attribute matches are used
    paramstructure   : per-particle smearing data; paramstructure[im][2] is a
                       list of (packed orientation/shift index, probability)
                       pairs (ignored when nosmearing is True)
    norm_per_particle: per-particle normalization factors (defaults to all 1.0)
    upweighted       : if False, the background noise table is re-applied to
                       each smeared slice via filt_table
    nosmearing       : if True each particle is inserted once with its own
                       projection parameters; otherwise smeared copies are
                       built from paramstructure and shrunken shifts
    Returns (fftvol, weight, refvol) on main_node; (None, None, None) elsewhere.
    """
    global Tracker, Blockdata
    from utilities import reduce_EMData_to_root, random_string, get_im, findall, model_blank, info
    from EMAN2 import Reconstructors
    from filter import filt_table
    from fundamentals import fshift
    from mpi import MPI_COMM_WORLD, mpi_barrier
    import types
    import datetime
    import copy
    if mpi_comm == None: mpi_comm = MPI_COMM_WORLD
    refvol = model_blank(target_size)
    refvol.set_attr("fudge", 1.0)
    if CTF: do_ctf = 1
    else: do_ctf = 0
    # Empty headers; the reconstructor allocates and fills them in setup().
    fftvol = EMData()
    weight = EMData()
    params = {"size":target_size, "npad":2, "snr":1.0, "sign":1, "symmetry":"c1", "refvol":refvol, "fftvol":fftvol, "weight":weight, "do_ctf": do_ctf}
    r = Reconstructors.get( "nn4_ctfw", params )
    r.setup()
    if norm_per_particle == None: norm_per_particle = len(prjlist)*[1.0]
    if not nosmearing:
        # Smearing setup: reference angles, angular step and the shift table
        # rescaled from the original window size to the current one.
        delta = Tracker["delta"]
        refang = Tracker["refang"]
        rshifts_shrank = copy.deepcopy(Tracker["rshifts"])
        nshifts = len(rshifts_shrank)
        for im in xrange(len(rshifts_shrank)):
            rshifts_shrank[im][0] *= float(Tracker["nxinit"])/float(Tracker["constants"]["nnxo"])
            rshifts_shrank[im][1] *= float(Tracker["nxinit"])/float(Tracker["constants"]["nnxo"])
    nnx = prjlist[0].get_xsize()
    nny = prjlist[0].get_ysize()
    for im in xrange(len(prjlist)):
        if not nosmearing: avgnorm = Tracker["avgnorm"][prjlist[im].get_attr("chunk_id")]
        # parse projection structure, generate three lists:
        # [ipsi+iang], [ishift], [probability]
        # Number of orientations for a given image
        if prjlist[im].get_attr("group") == group_ID:
            if random_subset == 2:
                # use every particle of the group regardless of chunk
                if nosmearing:
                    bckgn = prjlist[im].get_attr("bckgnoise")
                    ct = prjlist[im].get_attr("ctf")
                    prjlist[im].set_attr_dict( {"bckgnoise":bckgn, "ctf":ct} )
                    [phi, theta, psi, s2x, s2y] = get_params_proj(prjlist[im], xform = "xform.projection")
                    r.insert_slice(prjlist[im], Transform({"type":"spider","phi":phi,"theta":theta,"psi":psi}), 1.0)
                else:
                    # unpack orientation/shift indices and their probabilities
                    numbor = len(paramstructure[im][2])
                    ipsiandiang = [ paramstructure[im][2][i][0]/1000 for i in xrange(numbor) ]
                    allshifts = [ paramstructure[im][2][i][0]%1000 for i in xrange(numbor) ]
                    probs = [ paramstructure[im][2][i][1] for i in xrange(numbor) ]
                    # Find unique projection directions
                    tdir = list(set(ipsiandiang))
                    bckgn = prjlist[im].get_attr("bckgnoise")
                    ct = prjlist[im].get_attr("ctf")
                    # For each unique projection direction:
                    data = [None]*nshifts
                    for ii in xrange(len(tdir)):
                        # Find the number of times given projection direction appears on the list, it is the number of different shifts associated with it.
                        lshifts = findall(tdir[ii], ipsiandiang)
                        toprab = 0.0
                        for ki in xrange(len(lshifts)): toprab += probs[lshifts[ki]]
                        # probability-weighted average of the shifted copies
                        recdata = EMData(nny,nny,1,False)
                        recdata.set_attr("is_complex",0)
                        for ki in xrange(len(lshifts)):
                            lpt = allshifts[lshifts[ki]]
                            if( data[lpt] == None ):
                                # shift each copy once and cache it for reuse
                                data[lpt] = fshift(prjlist[im], rshifts_shrank[lpt][0], rshifts_shrank[lpt][1])
                                data[lpt].set_attr("is_complex",0)
                            Util.add_img(recdata, Util.mult_scalar(data[lpt], probs[lshifts[ki]]/toprab))
                        recdata.set_attr_dict({"padffted":1, "is_fftpad":1,"is_fftodd":0, "is_complex_ri":1, "is_complex":1})
                        if not upweighted: recdata = filt_table(recdata, bckgn )
                        recdata.set_attr_dict( {"bckgnoise":bckgn, "ctf":ct} )
                        # decode psi index and angle index from the packed value
                        ipsi = tdir[ii]%100000
                        iang = tdir[ii]/100000
                        #for iloop in xrange(10000000):
                        #if iloop%1000==0:memory_check("before slice %d myid %d"%(iloop, Blockdata["myid"]))
                        r.insert_slice( recdata, Transform({"type":"spider","phi":refang[iang][0],"theta":refang[iang][1],"psi":refang[iang][2]+ipsi*delta}), toprab*avgnorm/norm_per_particle[im])
                        #if iloop%1000==0:memory_check("after slice %d myid %d"%(iloop, Blockdata["myid"]))
            else:
                # restrict to the requested chunk (half-set)
                if prjlist[im].get_attr("chunk_id") == random_subset:
                    if nosmearing:
                        bckgn = prjlist[im].get_attr("bckgnoise")
                        ct = prjlist[im].get_attr("ctf")
                        prjlist[im].set_attr_dict({"bckgnoise":bckgn, "ctf":ct})
                        [phi, theta, psi, s2x, s2y] = get_params_proj(prjlist[im], xform = "xform.projection")
                        r.insert_slice(prjlist[im], Transform({"type":"spider","phi":phi,"theta":theta,"psi":psi}), 1.0)
                    else:
                        # same smearing machinery as the random_subset == 2 branch above
                        numbor = len(paramstructure[im][2])
                        ipsiandiang = [ paramstructure[im][2][i][0]/1000 for i in xrange(numbor) ]
                        allshifts = [ paramstructure[im][2][i][0]%1000 for i in xrange(numbor) ]
                        probs = [ paramstructure[im][2][i][1] for i in xrange(numbor) ]
                        # Find unique projection directions
                        tdir = list(set(ipsiandiang))
                        bckgn = prjlist[im].get_attr("bckgnoise")
                        ct = prjlist[im].get_attr("ctf")
                        # For each unique projection direction:
                        data = [None]*nshifts
                        for ii in xrange(len(tdir)):
                            # Find the number of times given projection direction appears on the list, it is the number of different shifts associated with it.
                            lshifts = findall(tdir[ii], ipsiandiang)
                            toprab = 0.0
                            for ki in xrange(len(lshifts)): toprab += probs[lshifts[ki]]
                            recdata = EMData(nny,nny,1,False)
                            recdata.set_attr("is_complex",0)
                            for ki in xrange(len(lshifts)):
                                lpt = allshifts[lshifts[ki]]
                                if( data[lpt] == None ):
                                    data[lpt] = fshift(prjlist[im], rshifts_shrank[lpt][0], rshifts_shrank[lpt][1])
                                    data[lpt].set_attr("is_complex",0)
                                Util.add_img(recdata, Util.mult_scalar(data[lpt], probs[lshifts[ki]]/toprab))
                            recdata.set_attr_dict({"padffted":1, "is_fftpad":1,"is_fftodd":0, "is_complex_ri":1, "is_complex":1})
                            if not upweighted: recdata = filt_table(recdata, bckgn )
                            recdata.set_attr_dict( {"bckgnoise":bckgn, "ctf":ct} )
                            ipsi = tdir[ii]%100000
                            iang = tdir[ii]/100000
                            r.insert_slice(recdata, Transform({"type":"spider","phi":refang[iang][0],"theta":refang[iang][1],"psi":refang[iang][2]+ipsi*delta}), toprab*avgnorm/norm_per_particle[im])
    # clean stuff
    #if not nosmearing: del recdata, tdir, ipsiandiang, allshifts, probs
    # Sum the partial volumes onto main_node and finalize there.
    reduce_EMData_to_root(fftvol, myid, main_node, comm=mpi_comm)
    reduce_EMData_to_root(weight, myid, main_node, comm=mpi_comm)
    if not nosmearing: del rshifts_shrank
    if myid == main_node:dummy = r.finish(True)
    mpi_barrier(mpi_comm)
    if myid == main_node: return fftvol, weight, refvol
    else:
        del fftvol
        del weight
        del refvol
        return None, None, None
####<<<<--------
##### FSC rec3d
def do3d_sorting_groups_fsc_only_iter(data, paramstructure, norm_per_particle, iteration):
global Tracker, Blockdata
# do resolution each time
keepgoing = 1
if(Blockdata["myid"] == Blockdata["nodes"][0]):
if not os.path.exists(os.path.join(Tracker["directory"], "tempdir")): os.mkdir(os.path.join(Tracker["directory"], "tempdir"))
do3d_sorting_group_insertion_random_two_for_fsc(data, paramstructure, norm_per_particle)
mpi_barrier(MPI_COMM_WORLD)
fsc143 = 0
fsc05 = 0
Tracker["fsc143"] = 0
Tracker["fsc05"] = 0
res_05 = Tracker["number_of_groups"]*[0]
res_143 = Tracker["number_of_groups"]*[0]
for index_of_colors in xrange(Blockdata["no_of_groups"]):
group_start, group_end = MPI_volume_start_end(Tracker["number_of_groups"], Blockdata["no_of_groups"], index_of_colors)
if Blockdata["color"] == index_of_colors: # It has to be 1 to avoid problem with tvol1 not closed on the disk
for index_of_group in xrange(group_start, group_end):
if Blockdata["myid_on_node"] == 0:
#print(" odd group %d"%index_of_group)
tvol0 = get_im(os.path.join(Tracker["directory"], "tempdir", "tvol_0_0_%d.hdf")%index_of_group)
tweight0 = get_im(os.path.join(Tracker["directory"], "tempdir", "tweight_0_0_%d.hdf")%index_of_group)
tvol1 = get_im(os.path.join(Tracker["directory"], "tempdir", "tvol_1_0_%d.hdf")%index_of_group)
tweight1 = get_im(os.path.join(Tracker["directory"], "tempdir", "tweight_1_0_%d.hdf")%index_of_group)
Util.fuse_low_freq(tvol0, tvol1, tweight0, tweight1, 2*Tracker["fuse_freq"])
tag = 7007
send_EMData(tvol1, Blockdata["no_of_processes_per_group"]-1, tag, Blockdata["shared_comm"])
send_EMData(tweight1, Blockdata["no_of_processes_per_group"]-1, tag, Blockdata["shared_comm"])
shrank0 = stepone(tvol0, tweight0)
elif Blockdata["myid_on_node"] == Blockdata["no_of_processes_per_group"]-1:
#print(" odd group %d"%index_of_group)
tag = 7007
tvol1 = recv_EMData(0, tag, Blockdata["shared_comm"])
tweight1 = recv_EMData(0, tag, Blockdata["shared_comm"])
tvol1.set_attr_dict( {"is_complex":1, "is_fftodd":1, 'is_complex_ri': 1, 'is_fftpad': 1} )
shrank1 = stepone(tvol1, tweight1)
if Blockdata["myid_on_node"] == 1:
tvol0 = get_im(os.path.join(Tracker["directory"], "tempdir", "tvol_0_1_%d.hdf")%index_of_group)
tweight0 = get_im(os.path.join(Tracker["directory"], "tempdir", "tweight_0_1_%d.hdf")%index_of_group)
tvol1 = get_im(os.path.join(Tracker["directory"], "tempdir", "tvol_1_1_%d.hdf")%index_of_group)
tweight1 = get_im(os.path.join(Tracker["directory"], "tempdir", "tweight_1_1_%d.hdf")%index_of_group)
Util.fuse_low_freq(tvol0, tvol1, tweight0, tweight1, 2*Tracker["fuse_freq"])
tag = 7007
send_EMData(tvol1, Blockdata["no_of_processes_per_group"]-2, tag, Blockdata["shared_comm"])
send_EMData(tweight1, Blockdata["no_of_processes_per_group"]-2, tag, Blockdata["shared_comm"])
shrank0 = stepone(tvol0, tweight0)
elif Blockdata["myid_on_node"] == Blockdata["no_of_processes_per_group"]-2:
#print(" even group %d"%index_of_group)
tag = 7007
tvol1 = recv_EMData(1, tag, Blockdata["shared_comm"])
tweight1 = recv_EMData(1, tag, Blockdata["shared_comm"])
tvol1.set_attr_dict( {"is_complex":1, "is_fftodd":1, 'is_complex_ri': 1, 'is_fftpad': 1} )
shrank1 = stepone(tvol1, tweight1)
mpi_barrier(Blockdata["shared_comm"])
if Blockdata["myid_on_node"] == 0:
tag = 7007
send_EMData(shrank0, Blockdata["no_of_processes_per_group"]-1, tag, Blockdata["shared_comm"])
del shrank0
lcfsc = 0
elif Blockdata["myid_on_node"] == Blockdata["no_of_processes_per_group"]-1:
#print(" now we do fsc odd ")
tag = 7007
shrank0 = recv_EMData(0, tag, Blockdata["shared_comm"])
cfsc = fsc(shrank0, shrank1)[1]
write_text_row(cfsc, os.path.join(Tracker["directory"], "fsc_driver_chunk0_grp%03d_iter%03d.txt")%(index_of_group,iteration))
del shrank0, shrank1
if(Tracker["nxinit"]/status
try:
t = open(_proc_status)
v = t.read()
t.close()
except:
return 0.0 # non-Linux?
# get VmKey line e.g. 'VmRSS: 9999 kB\n ...'
i = v.index(VmKey)
v = v[i:].split(None, 3) # whitespace
if len(v) < 3:
return 0.0 # invalid format?
# convert Vm value to bytes
return float(v[1]) * _scale[v[2]]
def memory(since=0.0):
    '''Return memory usage in bytes.

    Delegates to _VmB to read the 'VmSize:' entry of the process status
    and subtracts *since*, so a previous reading can be passed to obtain
    the growth since that point.
    '''
    vm_size = _VmB('VmSize:')
    return vm_size - since
def resident(since=0.0):
    '''Return resident memory usage in bytes.

    Delegates to _VmB to read the 'VmRSS:' entry of the process status
    and subtracts *since* (a previous reading) to report the delta.
    '''
    rss = _VmB('VmRSS:')
    return rss - since
def stacksize(since=0.0):
    '''Return stack size in bytes.

    Delegates to _VmB to read the 'VmStk:' entry of the process status
    and subtracts *since* (a previous reading) to report the delta.
    '''
    stk = _VmB('VmStk:')
    return stk - since
def memory_check(s="check_memory"):
    '''Print the tag *s* followed by the current total memory, resident-set
    and stack sizes in gigabytes (values obtained from the memory/resident/
    stacksize helpers above).
    '''
    import os
    print(s)
    for label, probe in ((" memory ", memory), (" resident ", resident), (" stacksize ", stacksize)):
        print(s + label, probe()/1.e9)
####<<<----do final maps ---->>>
def do_final_maps(number_of_groups, minimum_size, selected_iter, refinement_dir, masterdir, rec3d_image_size, log_main):
    """
    Reconstruct the final per-cluster maps after sorting.

    number_of_groups : number of clusters to reconstruct
    minimum_size     : smallest cluster size; limits the CPUs used per node
    selected_iter    : meridien refinement iteration whose parameters are used
    refinement_dir   : master directory of the meridien refinement
    masterdir        : sorting master directory (output location)
    rec3d_image_size : image size for the reconstruction (restored to
                       Tracker["nxinit"] at the end)
    log_main         : logger; messages are added on the main node only

    Two paths: with Tracker["nosmearing"] the data are reconstructed directly
    at full size via do3d_sorting_groups_trl_iter in a temporary maps_dir;
    otherwise each cluster subset is extracted from the refinement and
    reconstructed via ctrefromsorting_rec3d_faked_iter on the subgroup
    communicator. Temporary Cluster*/ directories are removed at the end.
    """
    global Tracker, Blockdata
    import shutil
    from shutil import copyfile
    # remove leftover per-cluster directories from a previous run; each
    # directory is removed by the rank whose id matches the cluster index
    for icluster in xrange(number_of_groups):
        clusterdir = os.path.join(masterdir, "Cluster%d"%icluster)
        if os.path.exists(clusterdir):
            if Blockdata["myid"] == icluster: shutil.rmtree(clusterdir)
    mpi_barrier(MPI_COMM_WORLD)
    line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
    if( Blockdata["myid"] == Blockdata["main_node"]):
        msg = "------->>>>>>>Check memory <<<<----------"
        log_main.add(msg)
        print(line, msg)
    # rough memory budget in GB: whole data stack plus, per CPU, a base
    # allowance and one (padded) volume
    basic_memory_per_cpu = 1.0
    total_data_in_mem = Tracker["constants"]["nnxo"]*Tracker["constants"]["nnxo"]*Tracker["constants"]["total_stack"]*4./1.e9
    one_volume_in_mem = Tracker["constants"]["nnxo"]*Tracker["constants"]["nnxo"]*Tracker["constants"]["nnxo"]*4.*8./1.e9
    nproc_do_final_per_node =(Tracker["constants"]["memory_per_node"] - total_data_in_mem -1.0)/(basic_memory_per_cpu + one_volume_in_mem)
    if( Blockdata["myid"] == Blockdata["main_node"]):
        msg = "total mem per node: %5.1f G"%Tracker["constants"]["memory_per_node"]
        log_main.add(msg)
        print(line, msg)
    # cap the per-node CPU count by what is actually available
    nproc_do_final_per_node = int(nproc_do_final_per_node)
    if nproc_do_final_per_node > Blockdata["nproc"] //Blockdata["no_of_groups"]:
        nproc_do_final_per_node = Blockdata["nproc"] //Blockdata["no_of_groups"]
    if Blockdata["nproc_previous"] > 0: nproc_do_final_per_node = min(nproc_do_final_per_node, Blockdata["nproc_previous"]//Blockdata["no_of_groups"])
    ncpu_per_node = min(minimum_size//5//Blockdata["no_of_groups"]//2, nproc_do_final_per_node)
    ncpu_per_node = max(ncpu_per_node, 2) # always use at least two CPUs per node
    if( Blockdata["myid"] == Blockdata["main_node"]):
        msg = "CPUs to be used per node: %d"%ncpu_per_node
        log_main.add(msg)
        print(line, msg)
    Blockdata["ncpuspernode"] = ncpu_per_node
    Blockdata["nsubset"] = Blockdata["ncpuspernode"]*Blockdata["no_of_groups"]
    create_subgroup()
    # save Tracker entries that the reconstruction helpers may overwrite;
    # they are restored below
    fuse_freq = Tracker["fuse_freq"] # sort does it already
    mask3D = Tracker["mask3D"]
    mtf = Tracker["constants"]["mtf"]
    fsc_adj = Tracker["constants"]["fsc_adj"]
    Bstart = Tracker["constants"]["B_start"]
    Bstop = Tracker["constants"]["B_stop"]
    aa = Tracker["constants"]["aa"]
    postlowpassfilter = Tracker["constants"]["postlowpassfilter"]
    B_enhance = Tracker["constants"]["B_enhance"]
    Tracker["number_of_groups"] = number_of_groups
    if Tracker["nosmearing"]:
        # reconstruct all groups directly at full size in a temporary maps_dir
        if(Blockdata["myid"] == Blockdata["main_node"]):
            map_dir = os.path.join(masterdir, "maps_dir")
            os.mkdir(map_dir)
        else:map_dir = 0
        map_dir = wrap_mpi_bcast(map_dir, Blockdata["main_node"], MPI_COMM_WORLD)
        Tracker["directory"] = map_dir
        Tracker["nxinit"] = Tracker["constants"]["nnxo"]
        compute_noise(Tracker["nxinit"])
        data = get_shrink_data_sorting(os.path.join(Tracker["constants"]["masterdir"], "final_partition.txt"), \
            os.path.join(Tracker["constants"]["masterdir"],"refinement_parameters.txt"), \
            return_real = False, preshift = True, apply_mask = False)
        do3d_sorting_groups_trl_iter(data, 0)
        del data
        if(Blockdata["myid"] == Blockdata["main_node"]):
            # copy each cluster's two unfiltered half-maps out of map_dir,
            # then discard the temporary directory
            for icluster in xrange(number_of_groups):
                copyfile(os.path.join(Tracker["directory"], "vol_unfiltered_0_grp%03d_iter000.hdf"%icluster), \
                    os.path.join(Tracker["constants"]["masterdir"], "vol_unfiltered_0_grp%03d.hdf"%icluster))
                copyfile(os.path.join(Tracker["directory"], "vol_unfiltered_1_grp%03d_iter000.hdf"%icluster), \
                    os.path.join(Tracker["constants"]["masterdir"], "vol_unfiltered_1_grp%03d.hdf"%icluster))
            shutil.rmtree(map_dir)
        mpi_barrier(MPI_COMM_WORLD)
    else:
        # smeared data: extract each cluster's subset from the refinement and
        # run the faked-iteration reconstruction on the subgroup communicator
        for icluster in xrange(Tracker["number_of_groups"]):
            cluster_masterdir = os.path.join(masterdir,"Cluster%d"%icluster)
            if(Blockdata["myid"] == Blockdata["main_node"]):
                if not os.path.exists(cluster_masterdir): os.mkdir(cluster_masterdir)
            mpi_barrier(MPI_COMM_WORLD)
            do_ctrefromsort3d_get_subset_data(cluster_masterdir, refinement_dir, \
                os.path.join(masterdir,"Cluster_%03d.txt"%icluster), selected_iter, None, Blockdata["subgroup_comm"])
            Tracker["constants"]["small_memory"] = False
            ctrefromsorting_rec3d_faked_iter(cluster_masterdir, selected_iter, rec3d_image_size, Blockdata["subgroup_comm"])
            mpi_barrier(MPI_COMM_WORLD)
        mpi_barrier(MPI_COMM_WORLD)
    # restore the Tracker entries saved above
    Tracker["constants"]["B_enhance"] = B_enhance
    Tracker["constants"]["B_start"] = Bstart
    Tracker["constants"]["B_stop"] = Bstop
    Tracker["constants"]["aa"] = aa
    Tracker["constants"]["postlowpassfilter"] = postlowpassfilter
    Tracker["constants"]["fsc_adj"]=fsc_adj
    Tracker["constants"]["mtf"] = mtf
    Tracker["mask3D"] = mask3D
    Tracker["nxinit"] = rec3d_image_size
    Tracker["number_of_groups"] = number_of_groups
    Tracker["fuse_freq"] = fuse_freq # reset
    # Using all CPUS to do step two
    Blockdata["ncpuspernode"] = Blockdata["nproc"]//Blockdata["no_of_groups"]
    Blockdata["nsubset"] = Blockdata["ncpuspernode"]*Blockdata["no_of_groups"]
    create_subgroup()
    do3d_sorting_groups_rec3d(selected_iter, masterdir, log_main)
    if(Blockdata["myid"] == Blockdata["main_node"]):
        # clean up the temporary per-cluster directories
        for icluster in xrange(Tracker["number_of_groups"]):
            cluster_masterdir = os.path.join(masterdir,"Cluster%d"%icluster)
            if os.path.exists(cluster_masterdir): shutil.rmtree(cluster_masterdir)
    return
#####<<<<--------------------------------------
def merge_two_unfiltered_maps(map1_file, map2_file, cluster_ID):
    """
    Combine the two unfiltered half-maps of one cluster into a final map.

    Single-processor routine. Workflow:
      1. read both half-maps (must have identical dimensions);
      2. compute their FSC (inside Tracker["mask3D"] if given), rescale it to
         the full-dataset estimate 2*fsc/(1+fsc), locate the 0.5 and 0.143
         cutoffs, smooth the curve past 0.143, write fsc_<cluster_ID>.txt;
      3. average the two maps; optionally divide by the detector MTF and
         apply the sqrt(FSC) power-spectrum adjustment;
      4. sharpen with a user-given B-factor, or one estimated from the
         Guinier plot when B_enhance == 0.0 (auto mode); skip when -1;
      5. low-pass filter (user value or the FSC-0.143 cutoff) and write
         vol_final_nomask_cluster%03d.hdf / vol_final_cluster%03d.hdf into
         the master directory.

    map1_file, map2_file : file names of the two half-maps
    cluster_ID           : cluster index, used in output file names

    Returns a one-line summary string (B-factor, low-pass filter, FSC cutoffs).
    """
    global Tracker, Blockdata
    # single processor only
    from math import sqrt, log
    try: map1 = get_im(map1_file)
    except: ERROR("Sphire postprocess fails to read the first map " + map1_file, "--postprocess option for 3-D", 1, Blockdata["myid"])
    try: map2 = get_im(map2_file)
    except: ERROR("Sphire postprocess fails to read the second map " + map2_file, "--postprocess option for 3-D", 1, Blockdata["myid"])
    if (map2.get_xsize() != map1.get_xsize()) or (map2.get_ysize() != map1.get_ysize()) or (map2.get_zsize() != map1.get_zsize()):
        ERROR(" Two input maps have different image size", "--postprocess option for 3-D", 1, Blockdata["myid"])
    if Tracker["mask3D"]:
        mask3D = get_im(Tracker["mask3D"])
        # resample the mask if it does not match the current image size
        if mask3D.get_xsize() != Tracker["nxinit"]:
            mask3D = fdecimate(mask3D, Tracker["nxinit"], Tracker["nxinit"], Tracker["nxinit"], True, False)
    else: mask3D = None
    ## prepare FSC
    resolution_FSC143 = 0.5 # for single volume, this is the default resolution
    resolution_FSChalf = 0.5
    if mask3D: fsc_true = fsc(map1*mask3D, map2*mask3D, 1)
    else: fsc_true = fsc(map1, map2, 1)
    resolution_in_angstrom = [None]*len(fsc_true[0])
    for ifreq in xrange(len(fsc_true[0])):
        if fsc_true[0][ifreq] !=0.0: resolution_in_angstrom [ifreq] = Tracker["constants"]["pixel_size"]/fsc_true[0][ifreq]
        else: resolution_in_angstrom [ifreq] = 0.0
    fsc_true[1][0] =1.0 # always reset fsc of zero frequency as 1.0
    # rescale the half-map FSC to the full-dataset estimate 2*fsc/(1+fsc)
    for ifreq in xrange(len(fsc_true[0])):
        fsc_true[1][ifreq] = fsc_true[1][ifreq]*2./(1.+fsc_true[1][ifreq])
    resolution_FSC143_right = 0.0
    resolution_FSC143_left = 0.0
    dip_at_fsc = False
    # nfreq0: last frequency before the FSC first dips below zero
    nfreq0 = 1
    for ifreq in xrange(1, len(fsc_true[1])):
        if fsc_true[1][ifreq] < 0.0:
            nfreq0 = ifreq - 1
            break
    if nfreq0 ==1: nfreq0= len(fsc_true[1]) - 1
    # nfreq05: last frequency before the FSC drops below 0.5
    nfreq05 = len(fsc_true[1])-1
    for ifreq in xrange(1, len(fsc_true[1])):
        if fsc_true[1][ifreq] < 0.5:
            resolution_FSChalf = fsc_true[0][ifreq-1]
            nfreq05 = ifreq-1
            break
    # leftmost 0.143 crossing, scanning up from the 0.5 cutoff
    resolution_FSC143_left = fsc_true[0][len(fsc_true[1])-1]
    for ifreq in xrange(nfreq05, len(fsc_true[1])):
        if fsc_true[1][ifreq] < 0.143:
            resolution_FSC143_left = fsc_true[0][ifreq-1]
            nfreq143 = ifreq - 1
            break
    #print(nfreq0, nfreq05, "check")
    # rightmost 0.143 crossing, scanning down from the first zero crossing
    nfreq143_right = nfreq0
    resolution_FSC143_right = fsc_true[0][nfreq05]
    for ifreq in xrange(len(fsc_true[0])):fsc_true[1][ifreq] = max(fsc_true[1][ifreq], 0.0)
    for ifreq in xrange(nfreq0, nfreq05, -1):
        if fsc_true[1][ifreq] >= 0.143:
            resolution_FSC143_right = fsc_true[0][ifreq]
            nfreq143_right = ifreq
            break
    # the rightmost crossing is the one reported and used below
    resolution_FSC143 = resolution_FSC143_right
    nfreq143 = nfreq143_right
    ## smooth FSC after FSC143 and set other values to zero
    for ifreq in xrange(nfreq143+1, len(fsc_true[1])):
        if ifreq ==nfreq143+1: fsc_true[1][ifreq] = (fsc_true[1][nfreq143-2] + fsc_true[1][nfreq143-1])/5.
        elif ifreq ==nfreq143+2: fsc_true[1][ifreq] = (fsc_true[1][nfreq143-1])/5.
        else: fsc_true[1][ifreq] = 0.0
    fsc_out = []
    for ifreq in xrange(len(fsc_true[0])): fsc_out.append("%5d %7.2f %7.3f"%(ifreq, resolution_in_angstrom[ifreq],fsc_true[1][ifreq]))
    write_text_file(fsc_out, "fsc_%d.txt"%cluster_ID)
    # average the two half-maps
    map1 +=map2 #(get_im(args[0])+get_im(args[1]))/2.0
    map1 /=2.0
    del map2
    # Guinier plot of the merged map (log of rotationally averaged amplitudes)
    outtext = [["Squaredfreq"],[ "LogOrig"]]
    guinierline = rot_avg_table(power(periodogram(map1),.5))
    for ig in xrange(len(guinierline)):
        x = ig*.5/float(len(guinierline))/Tracker["constants"]["pixel_size"]
        outtext[0].append("%10.6f"%(x*x))
        outtext[1].append("%10.6f"%log(guinierline[ig]))
    if Tracker["constants"]["mtf"]: # divided by the mtf #1
        # BUGFIX: the original called log_main.add() here, but log_main is not
        # defined in this function (NameError whenever an MTF file was given);
        # report through print instead, as the rest of this function does.
        print("MTF correction is applied")
        print("MTF file is %s"%Tracker["constants"]["mtf"])
        try: mtf_core = read_text_file(Tracker["constants"]["mtf"], -1)
        except: ERROR("Sphire postprocess fails to read MTF file "+Tracker["constants"]["mtf"], "--postprocess option for 3-D", 1)
        map1 = fft(Util.divide_mtf(fft(map1), mtf_core[1], mtf_core[0]))
        outtext.append(["LogMTFdiv"])
        guinierline = rot_avg_table(power(periodogram(map1),.5))
        for ig in xrange(len(guinierline)): outtext[-1].append("%10.6f"%log(guinierline[ig]))
    if Tracker["constants"]["fsc_adj"]: #2
        #### FSC adjustment ((2.*fsc)/(1+fsc)) to the powerspectrum;
        fil = len(fsc_true[1])*[None]
        for i in xrange(len(fil)): fil[i] = sqrt(fsc_true[1][i]) # fsc already matched to full dataset
        map1 = filt_table(map1,fil)
        guinierline = rot_avg_table(power(periodogram(map1),.5))
        outtext.append(["LogFSCadj"])
        for ig in xrange(len(guinierline)):outtext[-1].append("%10.6f"%log(guinierline[ig]))
    # BUGFIX: define global_b up front; it was previously left undefined when
    # B_enhance == -1 (no sharpening), making the summary message below raise
    # NameError. 0.0 reports "no B-factor applied".
    global_b = 0.0
    if Tracker["constants"]["B_enhance"] !=-1: #3 One specifies and then apply B-factor sharpen
        if Tracker["constants"]["B_enhance"] == 0.0: # auto mode
            cutoff_by_fsc = 0
            for ifreq in xrange(len(fsc_true[1])):
                if fsc_true[1][ifreq]<0.143: break
            cutoff_by_fsc = float(ifreq-1)
            freq_max = cutoff_by_fsc/(2.*len(fsc_true[0]))/Tracker["constants"]["pixel_size"]
            guinierline = rot_avg_table(power(periodogram(map1),.5))
            logguinierline = []
            for ig in xrange(len(guinierline)):logguinierline.append(log(guinierline[ig]))
            freq_min = 1./Tracker["constants"]["B_start"] # given frequencies in Angstrom unit, say, B_start is 10 Angstrom, or 15 Angstrom
            if Tracker["constants"]["B_stop"]!=0.0:
                freq_max = 1./Tracker["constants"]["B_stop"]
                B_stop = Tracker["constants"]["B_stop"]
            else: B_stop = Tracker["constants"]["pixel_size"]/(float(nfreq143)/Tracker["constants"]["nnxo"])
            if freq_min>= freq_max:
                print("Your B_start is too high!")
                freq_min = 1./(B_stop + 8.)
            # least-squares B-factor fit to the Guinier plot between freq_min and freq_max
            b, junk, ifreqmin, ifreqmax = compute_bfactor(guinierline, freq_min, freq_max, Tracker["constants"]["pixel_size"])
            global_b = min(4.*b, 400.) # B-factor should not be too large
            cc = pearson(junk[1],logguinierline)
            sigma_of_inverse = sqrt(2./(global_b/Tracker["constants"]["pixel_size"]**2))
        else: # User provided value
            sigma_of_inverse = sqrt(2./((abs(Tracker["constants"]["B_enhance"]))/Tracker["constants"]["pixel_size"]**2))
            global_b = Tracker["constants"]["B_enhance"]
        map1 = filt_gaussinv(map1, sigma_of_inverse)
        guinierline = rot_avg_table(power(periodogram(map1),.5))
        last_non_zero = -999.0
        # NOTE(review): this appends to the last existing Guinier column
        # ("LogFSCadj"/"LogMTFdiv"/"LogOrig") instead of starting a new one;
        # presumably a missing outtext.append([...]) header -- left unchanged
        # to preserve the existing output layout.
        for ig in xrange(len(guinierline)):
            if guinierline[ig]>0:
                outtext[-1].append("%10.6f"%log(guinierline[ig]))
                last_non_zero = log(guinierline[ig])
            else: outtext[-1].append("%10.6f"%last_non_zero)
    lowpassfilter = 0.0
    if Tracker["constants"]["postlowpassfilter"] != 0.0: # User provided low-pass filter #4.
        if Tracker["constants"]["postlowpassfilter"] > 0.5: # Input is in Angstrom
            map1 = filt_tanl(map1,Tracker["constants"]["pixel_size"]/Tracker["constants"]["postlowpassfilter"], min(Tracker["constants"]["aa"],.1))
            lowpassfilter = Tracker["constants"]["pixel_size"]/Tracker["constants"]["postlowpassfilter"]
        elif Tracker["constants"]["postlowpassfilter"]>0.0 and Tracker["constants"]["postlowpassfilter"]<0.5: # input is in absolution frequency
            map1 = filt_tanl(map1,Tracker["constants"]["postlowpassfilter"], min(Tracker["constants"]["aa"],.1))
            lowpassfilter = Tracker["constants"]["postlowpassfilter"]
    else:
        map1 = filt_tanl(map1,resolution_FSC143, Tracker["constants"]["aa"])
        lowpassfilter = resolution_FSC143
    # BUGFIX: guard the conversion of the applied filter back to Angstrom;
    # lowpassfilter can remain 0.0 (postlowpassfilter given as exactly 0.5 or
    # negative leaves both branches above untaken), which previously raised
    # ZeroDivisionError here.
    if lowpassfilter > 0.0: applied_filter_in_A = Tracker["constants"]["pixel_size"]/lowpassfilter
    else: applied_filter_in_A = 0.0
    msg = "Cluster %3d applied B_factor %10.6f applied low_pass_filter_to %10.6f "%(cluster_ID, global_b, applied_filter_in_A)
    msg +=" FSC05/FSC143 %5d/%5d %6.3f/%6.3f"%(nfreq05, nfreq143, Tracker["constants"]["pixel_size"]*Tracker["constants"]["nnxo"]/float(nfreq05), \
        Tracker["constants"]["pixel_size"]*Tracker["constants"]["nnxo"]/float(nfreq143))
    print(msg)
    map1.write_image(os.path.join(Tracker["constants"]["masterdir"], "vol_final_nomask_cluster%03d.hdf"%cluster_ID))
    if mask3D: map1 *=mask3D
    map1.write_image(os.path.join(Tracker["constants"]["masterdir"], "vol_final_cluster%03d.hdf"%cluster_ID))
    if mask3D: del mask3D
    del map1
    return msg
def main():
from optparse import OptionParser
from global_def import SPARXVERSION
from EMAN2 import EMData
from logger import Logger, BaseLogger_Files
from global_def import ERROR
import sys, os, time, shutil
global Tracker, Blockdata
progname = os.path.basename(sys.argv[0])
usage = progname + " stack outdir --refinement_dir=masterdir_of_sxmeridien --mask3D=mask.hdf --focus=binarymask.hdf --radius=outer_radius " +\
" --sym=c1 --nindependent=indenpendent_runs --img_per_grp=img_per_grp "
parser = OptionParser(usage,version=SPARXVERSION)
parser.add_option("--refinement_dir", type ="string", default ='', help="3-D refinement directory, the master directory of sxmeridien")
parser.add_option("--output_dir", type ="string", default ='', help="name of the directory for sorting computing")
parser.add_option("--niter_for_sorting", type ="int", default =-1, help="selected number of iteration of meridien refinement for sorting, -1 implies program uses the best iteration to initiate sorting")
parser.add_option("--focus", type ="string", default ='', help="file name, the bineary 3D mask for focused clustering ")
parser.add_option("--mask3D", type ="string", default ='', help="file name, the 3-D global mask for clustering ")
parser.add_option("--instack", type ="string", default ='', help="file name, data stack for sorting provided by user. It applies when sorting starts from a given data stack")
parser.add_option("--radius", type ="int", default =-1, help="particle radius in pixel for rotational correlation >> SORT3D <<<-----"]
###--------------------------------------------------------------------------------------------
# Two typical sorting scenarios
#
# 1. import data and refinement parameters from meridien refinement;
# 2. given data stack and xform.projection/ctf in header(For simulated test data);
#<<<---------------------->>>imported functions<<<---------------------------------------------
from statistics import k_means_match_clusters_asg_new,k_means_stab_bbenum
from utilities import get_im,bcast_number_to_all,cmdexecute,write_text_file,read_text_file,wrap_mpi_bcast, get_params_proj, write_text_row
from utilities import get_number_of_groups
from filter import filt_tanl
from time import sleep
from logger import Logger,BaseLogger_Files
import string
from string import split, atoi, atof
import json
import user_functions
####--------------------------------------------------------------
main_keepgoing = 1
main_number_of_clusters = 0
# Create Master directory
line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
masterdir = Tracker["constants"]["masterdir"]
if(Blockdata["myid"] == Blockdata["main_node"]):
print(line, "Sort3d starts")
if not masterdir:
timestring = strftime("_%d_%b_%Y_%H_%M_%S", localtime())
masterdir ="sort3d"+timestring
os.mkdir(masterdir)
else:
if not os.path.exists(masterdir): os.mkdir(masterdir)
li =len(masterdir)
else:li = 0
li = mpi_bcast(li,1,MPI_INT,Blockdata["main_node"],MPI_COMM_WORLD)[0]
masterdir = mpi_bcast(masterdir,li,MPI_CHAR,Blockdata["main_node"],MPI_COMM_WORLD)
masterdir = string.join(masterdir,"")
Tracker["constants"]["masterdir"] = masterdir
Tracker["constants"]["chunk_0"] = os.path.join(Tracker["constants"]["masterdir"],"chunk_0.txt")
Tracker["constants"]["chunk_1"] = os.path.join(Tracker["constants"]["masterdir"],"chunk_1.txt")
log_main = Logger(BaseLogger_Files())
log_main.prefix = Tracker["constants"]["masterdir"]+"/"
import_from_relion_refinement = 0
import_from_sparx_refinement = 0
import_from_data_stack = 0
total_stack = 0
while not os.path.exists(Tracker["constants"]["masterdir"]):
print("Node ", Blockdata["myid"], "waiting...", Tracker["constants"]["masterdir"])
sleep(1)
mpi_barrier(MPI_COMM_WORLD)
if(Blockdata["myid"] == Blockdata["main_node"]):
line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
msg = "importing data ... "
print(line, msg)
log_main.add(msg)
######### Extract refinement information
if Tracker["constants"]["refinement_method"] =="SPARX": # Senario one
import_from_sparx_refinement = get_input_from_sparx_ref3d(log_main)
Tracker["smearing"] = True
else: # Senario three, sorting from a given data stack, general cases
#if not Tracker["constants"]["nofinal_sharpen"]: Tracker["constants"]["nofinal_sharpen"] = True
import_from_data_stack = get_input_from_datastack(log_main)
Tracker["constants"]["hardmask"] = True
Tracker["applybckgnoise"] = False
Tracker["applymask"] = True
Tracker["smearing"] = False
###<<<------------------------>>>>>>checks<<<<<-------------
if Tracker["constants"]["symmetry"] != options.sym:
if(Blockdata["myid"] == Blockdata["main_node"]):
msg = "input symmetry %s is altered to %s after reading refinement information! "%(options.sym, Tracker["constants"]["symmetry"])
log_main.add(msg)
print(msg)
###<<<----------------------->>>>> SORT3D MAIN PROGRAM <<<<<---------------------------------------------# For all cases
line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
if(Blockdata["myid"] == Blockdata["main_node"]):
print(line, "Sort3d main program")
log_main.add("---------->>>SPARX sort3d<<<--------------")
log_main.add("The shell line command:")
line = ""
for a in sys.argv: line +=(a + " ")
log_main.add(line)
log_main.add("Sort3d master directory: %s"%Tracker["constants"]["masterdir"])
print_dict(Tracker["constants"],"Permanent sorting settings after initialization")
mpi_barrier(MPI_COMM_WORLD)
####--->>>>>> Check 3-D mask<<<<----------
if Tracker["constants"]["mask3D"]:Tracker["mask3D"] = os.path.join(Tracker["constants"]["masterdir"],"smask.hdf")
else: Tracker["mask3D"] = None
if Tracker["constants"]["focus3Dmask"]:Tracker["focus3D"] = Tracker["constants"]["focus3Dmask"]
else: Tracker["focus3D"] = None
if(Blockdata["myid"] == Blockdata["main_node"]):
bad_focus3Dmask = 0
if Tracker["constants"]["focus3Dmask"]:
try:
focusmask = get_im(Tracker["constants"]["focus3Dmask"])
st = Util.infomask(binarize(focusmask), None, True)
if(st[0] == 0.0): bad_focus3Dmask = 1
else:
focusmask.write_image(os.path.join(Tracker["constants"]["masterdir"], "focus3d.hdf"))
Tracker["focus3D"] = os.path.join(Tracker["constants"]["masterdir"], "focus3d.hdf")
except: bad_focus3Dmask = 1
else: bad_focus3Dmask = 0
bad_focus3Dmask = bcast_number_to_all(bad_focus3Dmask, source_node = Blockdata["main_node"])
if bad_focus3Dmask: ERROR("Incorrect focused mask, after binarize all values zero","sxsort3d.py", 1, Blockdata["myid"])
if(Blockdata["myid"] == Blockdata["main_node"]):
bad_3Dmask = 0
if Tracker["constants"]["mask3D"]:
try:
mask3D = get_im(Tracker["constants"]["mask3D"])
st = Util.infomask(binarize(mask3D), None, True)
if (st[0] ==0.0): bad_3Dmask = 1
else:
mask3D.write_image(os.path.join(Tracker["constants"]["masterdir"], "mask3D.hdf"))
Tracker["mask3D"]= os.path.join(Tracker["constants"]["masterdir"], "mask3D.hdf")
except: bad_3Dmask = 1
else: bad_3Dmask = 0
bad_3Dmask = bcast_number_to_all(bad_focus3Dmask, source_node = Blockdata["main_node"])
if bad_3Dmask: ERROR("Incorrect 3D mask", "sxsort3d.py", 1, Blockdata["myid"])
Tracker = wrap_mpi_bcast(Tracker, Blockdata["main_node"], MPI_COMM_WORLD)
############################################################################################
###<<<------ Determine the image size ### reset nxinit simply for calculate currentres!
Tracker["nxinit"] = Tracker["nxinit_refinement"]
Tracker["currentres"] = float(Tracker["constants"]["fsc05"])/float(Tracker["nxinit"])
##################--------------->>>>>> shrinkage, current resolution, fuse_freq <<<<<<------------------------------------------
Tracker["total_stack"] = Tracker["constants"]["total_stack"]
Tracker["shrinkage"] = float(Tracker["nxinit"])/Tracker["constants"]["nnxo"]
Tracker["radius"] = Tracker["constants"]["radius"]*Tracker["shrinkage"]
try: fuse_freq = Tracker["fuse_freq"]
except: Tracker["fuse_freq"] = int(Tracker["constants"]["pixel_size"]*Tracker["constants"]["nnxo"]/Tracker["constants"]["fuse_freq"]+0.5)
###################------------------->>>>> 3-D masks <<<<<-----------------------------------------------------------
line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
if(Blockdata["myid"] == Blockdata["main_node"]):
print_dict(Tracker["constants"],"Permanent sorting settings from input options")
fout = open(os.path.join(Tracker["constants"]["masterdir"], "Tracker.json"),'w')
json.dump(Tracker, fout)
fout.close()
msg = "---------->>>SORT3D<<<-----------"
log_main.add(msg)
print(line, msg)
msg = "orgstack: %s"%Tracker["constants"]["orgstack"]
print(line, msg)
log_main.add(msg)
msg ="image comparison method: %s"%Tracker["constants"]["comparison_method"]
print(line, msg)
log_main.add(msg)
msg ="3-D reconstruction method: %s"%Tracker["constants"]["interpolation"]
print(line, msg)
log_main.add(msg)
if Tracker ["constants"]["focus3Dmask"]:
msg ="User provided focus mask file: %s"%Tracker ["constants"]["focus3Dmask"]
print(line, msg)
log_main.add(msg)
Tracker["full_list"] = read_text_file(os.path.join(Tracker["constants"]["masterdir"],"indexes.txt"), -1) # could have one or two columns
if len(Tracker["full_list"]) == 2: Tracker["full_list"] = Tracker["full_list"][1] # First column should be always group assignment
elif len(Tracker["full_list"]) == 1: Tracker["full_list"] = Tracker["full_list"][0]
else: ERROR("The original particle ID for sorting has wrong format", "sxsort3d.py", 1, Blockdata["main_node"])
else: Tracker["full_list"] = 0
Tracker["full_list"] = wrap_mpi_bcast(Tracker["full_list"], Blockdata["main_node"], MPI_COMM_WORLD)
Tracker["shrinkage"] = float(Tracker["nxinit"])/Tracker["constants"]["nnxo"]
if(Blockdata["myid"] == Blockdata["main_node"]): print_dict(Tracker,"Current sorting settings")
Tracker["ratio"] = 0
### Check mpi setting
check_mpi_settings(log_main)
if options.post_sorting_sharpen: # post sorting option!
try: nxinit = Tracker["nxinit"]
except: Tracker["nxinit"] = -1
Tracker["constants"]["orgres"] = 0.0
Tracker["constants"]["refinement_delta"] = 0.0
Tracker["constants"]["refinement_ts"] = 0.0
Tracker["constants"]["refinement_xr"] = 0.0
Tracker["constants"]["refinement_an"] = 0.0
if(Blockdata["myid"] == Blockdata["main_node"]):
fout = open(os.path.join(Tracker["constants"]["masterdir"], "Tracker.json"),'r')
Tracker = convert_json_fromunicode(json.load(fout))
fout.close()
try: output = Tracker["output"]
except: Tracker["output"] = []
else: Tracker = []
Tracker = wrap_mpi_bcast(Tracker, Blockdata["main_node"], MPI_COMM_WORLD)
number_of_groups = 0
minimum_size = Tracker["constants"]["img_per_grp"]
if(Blockdata["myid"] == Blockdata["main_node"]):
final_accounted_ptl = 0
line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
msg = "---->>> Summaries of final results <<<-----"
print(line, msg)
Tracker["output"].append(msg)
while os.path.exists(os.path.join(Tracker["constants"]["masterdir"], "Cluster_%03d.txt"%number_of_groups)):
class_in = read_text_file(os.path.join(Tracker["constants"]["masterdir"], "Cluster_%03d.txt"%number_of_groups))
minimum_size = min(len(class_in), minimum_size)
msg = " %10d clusters %10d group size "%(number_of_groups, len(class_in))
print(line, msg)
Tracker["output"].append(msg)
number_of_groups +=1
final_accounted_ptl +=len(class_in)
msg = "total number of particle images: %10d; accounted: %10d ; number_of_groups: %5d"%(Tracker["constants"]["total_stack"], final_accounted_ptl, number_of_groups)
print(line, msg)
Tracker["output"].append(msg)
number_of_groups = bcast_number_to_all(number_of_groups, Blockdata["main_node"], MPI_COMM_WORLD)
if number_of_groups == 0:ERROR("No cluster is found, and the program terminates. ", "option post_sorting_sharpen ", 1, Blockdata["myid"])
minimum_size = bcast_number_to_all(minimum_size, Blockdata["main_node"], MPI_COMM_WORLD)
compute_noise(Tracker["constants"]["nnxo"])
do_final_maps(number_of_groups, minimum_size, Tracker["constants"]["selected_iter"], Tracker["constants"]["refinement_dir"], \
Tracker["constants"]["masterdir"], Tracker["constants"]["nnxo"], log_main)
if(Blockdata["myid"] == Blockdata["main_node"]):
for iproc in xrange(number_of_groups):
msg = merge_two_unfiltered_maps(os.path.join(Tracker["constants"]["masterdir"], "vol_unfiltered_0_grp%03d.hdf"%iproc), \
os.path.join(Tracker["constants"]["masterdir"], "vol_unfiltered_1_grp%03d.hdf"%iproc), iproc)
Tracker["output"].append(msg)
write_text_file(Tracker["output"], os.path.join(Tracker["constants"]["masterdir"], "final.txt"))
mpi_barrier(MPI_COMM_WORLD)
from mpi import mpi_finalize
mpi_finalize()
exit()
mpi_barrier(MPI_COMM_WORLD)
###<<<------- ++++++++++ sort3d starts here +++++++++++ ----------
for indep_sort3d in xrange(Tracker["total_sort3d_indepent_run"]):
sorting = {}
sorting["Unaccounted"] = None
sorting["Accounted"] = []
partid_file = os.path.join(Tracker["constants"]["masterdir"],"indexes.txt")
line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
Tracker["indep_sort3d_dir"] = os.path.join(Tracker["constants"]["masterdir"], "sort3d_run%d"%indep_sort3d)
if Blockdata["myid"] == Blockdata["main_node"]:
line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
if not os.path.exists(Tracker["indep_sort3d_dir"]): os.mkdir(Tracker["indep_sort3d_dir"])
msg = "---------->>> Independent sort3d %d <<<----------- "%indep_sort3d
print(line, msg)
Tracker["output"].append("++++ SORT3D %d"%indep_sort3d)
log_main.add(msg)
log_main.add("nnxo : %d"%Tracker["constants"]["nnxo"])
log_main.add("Current resolution: %f absolute unit(maximum is 0.5) 1./%7.2f Angstrom "%(Tracker["currentres"]*Tracker["shrinkage"],\
Tracker["constants"]["pixel_size"]/Tracker["currentres"]/Tracker["shrinkage"]))
if Tracker["mask3D"]:
msg = "User provided 3-D mask: %s"%Tracker["constants"]["mask3D"]
log_main.add(msg)
print(line, msg)
if import_from_sparx_refinement ==1:
msg = "Sorting is initiated from meridien refinement"
print(line, msg)
log_main.add(msg)
elif import_from_data_stack ==1:
msg = "Sorting is initiated from data stack"
print(line, msg)
log_main.add(msg)
else: ERROR("importing neither from a meridien refinement nor a given data stack", "sort3d", 1, Blockdata["myid"])
sorting["total"] = read_text_file(partid_file, -1)
if len(sorting["total"])>1: sorting["total"] = sorting["total"][1]
else: sorting["total"] = sorting["total"][0]
else:
sorting["total"] = 0
Tracker = 0
sorting["total"] = wrap_mpi_bcast(sorting["total"], Blockdata["main_node"]) # total number of records in indexes.txt file
if Blockdata["myid"] == Blockdata["main_node"]:
Tracker["img_per_grp"] = Tracker["constants"]["img_per_grp"]
Tracker["total_stack"] = len(sorting["total"]) # start from beginning
msg = "img_per_grp: %d"%Tracker["img_per_grp"]
print(line, msg)
log_main.add(msg)
Tracker["number_of_groups"] = get_number_of_groups(Tracker["total_stack"],Tracker["img_per_grp"])
else: Tracker = 0
Tracker = wrap_mpi_bcast(Tracker, Blockdata["main_node"], MPI_COMM_WORLD)
final_list = do_EQKmeans_nways_clustering_stable_seeds(Tracker["indep_sort3d_dir"], partid_file, \
os.path.join(Tracker["constants"]["masterdir"],"refinement_parameters.txt"), sorting["Accounted"], log_main)
sorting["Unaccounted"] = Tracker["unaccounted_list"]
if Blockdata["myid"] == Blockdata["main_node"]:
fout = open(os.path.join(Tracker["indep_sort3d_dir"], "sorting.json"),'w')
json.dump(sorting, fout)
fout.close()
sort3d_clusters = split_partition_into_clusters(sorting["Accounted"])
sort3d_account_list, sort3d_res_partition_list = merge_classes_into_partition_list(sort3d_clusters)
write_text_row(sort3d_res_partition_list, os.path.join(os.path.join(Tracker["indep_sort3d_dir"], "sort3d_partition.txt")))
Tracker["number_of_groups"] = len(sort3d_clusters)
for icluster in xrange(len(sort3d_clusters)): write_text_file(sort3d_clusters[icluster], os.path.join(Tracker["indep_sort3d_dir"],"Cluster%d.txt"%icluster))
else: Tracker = 0
Tracker = wrap_mpi_bcast(Tracker, Blockdata["main_node"], MPI_COMM_WORLD)
###>>>>>--------->>>>rec3D<<<---------
Tracker["nxinit"] = Tracker["nxinit_refinement"] # report resolution and structure in refinement image size
data, norm_per_particle = get_shrink_data_sorting_smearing(os.path.join(Tracker["indep_sort3d_dir"], "sort3d_partition.txt"), \
os.path.join(Tracker["constants"]["masterdir"],"refinement_parameters.txt"), \
return_real = False, preshift = True, apply_mask = False)
if Tracker["nosmearing"]: parameterstructure = None
else: parameterstructure = read_paramstructure_for_sorting(os.path.join(Tracker["indep_sort3d_dir"], "sort3d_partition.txt"),\
Tracker["paramstructure_dict"], Tracker["paramstructure_dir"])
Tracker["directory"] = Tracker["indep_sort3d_dir"]
do3d_sorting_groups_trl_smearing_iter(data, parameterstructure, norm_per_particle, 0, True)
del data
if not Tracker["nosmearing"]:
del parameterstructure
del norm_per_particle
###---->>>>>> Summary of results given by sort3d <<<<---------
if(Blockdata["myid"] == Blockdata["main_node"]):
if os.path.exists(os.path.join(Tracker["directory"], "tempdir")): shutil.rmtree(os.path.join(Tracker["directory"], "tempdir"))
line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
msg = "--->>>Summary of indepenent sort3d run %d<<<----"%indep_sort3d
print(line, msg)
log_main.add(msg)
for icluster in xrange(Tracker["number_of_groups"]):
line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
cfsc = read_text_file(os.path.join(Tracker["indep_sort3d_dir"],"fsc_driver_grp%03d_iter000.txt"%icluster), -1)
res05, res143 = get_res(cfsc[0])
msg = "group %3d size : %8d FSC05 %f FSC143 %f"%(icluster, len(sort3d_clusters[icluster]), \
float(res05)/float(Tracker["nxinit"]), float(res143)/float(Tracker["nxinit"]))
print(line, msg)
log_main.add(msg)
Tracker["output"].append(msg)
else: Tracker["output"] = 0
Tracker["output"] = wrap_mpi_bcast(Tracker["output"], Blockdata["main_node"])
mpi_barrier(MPI_COMM_WORLD)
if(Blockdata["myid"] == Blockdata["main_node"]):
Tracker["output"].append("----->>>Summaries of SORT3D <<<-------")
ptp = []
for indep_sort3d in xrange(Tracker["total_sort3d_indepent_run"]):
indep_sort3d_dir = os.path.join(Tracker["constants"]["masterdir"], "sort3d_run%d"%indep_sort3d)
if os.path.exists(os.path.join(indep_sort3d_dir, "sorting.json")):
fout = open(os.path.join(indep_sort3d_dir, "sorting.json"), "r")
res_sort3d = convert_json_fromunicode(json.load(fout))
fout.close()
merged_classes = split_partition_into_clusters(res_sort3d["Accounted"])
sptp = prep_ptp_single(merged_classes, res_sort3d["total"])
ptp.append(sptp)
else: ERROR("sorting results do not exist", "sort3d", 1, Blockdata["myid"])
accounted_list, unaccounted_list, new_index, sort3d_clusters = do_two_way_comparison_classes(ptp[0], ptp[1], len(res_sort3d["total"]))
Tracker["output"].append("Accounted %d Unaccounted %d"%(len(accounted_list), len(unaccounted_list)))
for ic in xrange(len(sort3d_clusters)):
Tracker["output"].append("Cluster %d members %d"%(ic, len(sort3d_clusters[ic])))
write_text_file(sort3d_clusters[ic], os.path.join(Tracker["constants"]["masterdir"], "Cluster_%03d.txt"%ic))
main_number_of_clusters +=1
if get_number_of_groups(len(unaccounted_list),Tracker["img_per_grp"])<=1:
Tracker["output"].append("Cluster %d members %d"%(main_number_of_clusters, len(unaccounted_list)))
main_keepgoing = 0
write_text_file(unaccounted_list, os.path.join(Tracker["constants"]["masterdir"], "Cluster_%03d.txt"%len(sort3d_clusters)))
Tracker["output"].append("----->>>>> SORT3D finishes with the unaccounted particles are treated as one cluster <<<<<--------")
else: Tracker["output"].append("----->>>>> RSORT <<<<<--------")
total_rsort = len(unaccounted_list)
else: Tracker = 0
Tracker = wrap_mpi_bcast(Tracker, Blockdata["main_node"])
main_keepgoing = bcast_number_to_all(main_keepgoing, Blockdata["main_node"], MPI_COMM_WORLD)
main_number_of_clusters = bcast_number_to_all(main_number_of_clusters, Blockdata["main_node"], MPI_COMM_WORLD)
if main_keepgoing == 0:
if(Blockdata["myid"] == Blockdata["main_node"]):
write_text_file(Tracker["output"], os.path.join(Tracker["constants"]["masterdir"], "final.txt"))
fout = open(os.path.join(Tracker["constants"]["masterdir"], "Tracker.json"),'w')
json.dump(sorting, fout)
fout.close()
mpi_barrier(MPI_COMM_WORLD)
else:
######## RSORT #######
iter_rsort = 0
while iter_rsort< Tracker["total_iter_rsort"]:
Tracker["directory"] = os.path.join(Tracker["constants"]["masterdir"], "rsort%d"%iter_rsort)
if Blockdata["myid"] == Blockdata["main_node"]:
if not os.path.exists(Tracker["directory"]): os.mkdir(Tracker["directory"])
Tracker["output"].append("RSORT %d"%iter_rsort)
line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
msg = "------------>>>RSORT %d<<<-----------"%iter_rsort
log_main.add(msg)
print(line, msg)
write_text_file(unaccounted_list, os.path.join(Tracker["directory"], "Unaccounted.txt"))
Tracker["total_stack"] = len(unaccounted_list)
else: Tracker = 0
Tracker = wrap_mpi_bcast(Tracker, Blockdata["main_node"], MPI_COMM_WORLD)
partid_file = os.path.join(os.path.join(Tracker["directory"], "Unaccounted.txt"))
Tracker["unaccounted_list"] = []
Tracker["accounted_list"] = []
Tracker["number_of_groups"] = get_number_of_groups(Tracker["total_stack"],Tracker["img_per_grp"])
if Tracker["number_of_groups"]>1: #continue N-ways_clustering on unaccounted particles
sorting = {}
sorting["Accounted"] = [Tracker["accounted_list"]] # keep the accounted
final_sort = []
final_list = do_EQKmeans_nways_clustering_stable_seeds(os.path.join(Tracker["constants"]["masterdir"], "rsort%d"%iter_rsort), partid_file,\
os.path.join(Tracker["constants"]["masterdir"],"refinement_parameters.txt"), sorting["Accounted"], log_main)
for a in sorting["Accounted"]: final_sort.append(a)
if (Blockdata["myid"] == Blockdata["main_node"]):
try:
Tracker["unaccounted_list"] = read_text_file(Tracker["Unaccounted_on_disk"]) # defined in do_two_way_comparison_over_nindepruns
if len(Tracker["unaccounted_list"])> Tracker["constants"]["img_per_grp"]//2:
for iparticle in xrange(len(Tracker["unaccounted_list"])):
Tracker["unaccounted_list"][iparticle] = [0, Tracker["unaccounted_list"][iparticle]]
final_sort.append(Tracker["unaccounted_list"])
else:
msg = "unaccounted_list size is too small"
print(line, msg)
log_main.add(msg)
write_text_file(Tracker["unaccounted_list"], os.path.join(Tracker["constants"]["masterdir"], "rsort%d"%iter_rsort, "rsort_unaccounted.txt"))
except: print("no saved unaccounted text")
indexed_particle_list, Tracker["number_of_groups"] = merge_original_id_lists(final_sort)
write_text_row(indexed_particle_list, os.path.join(Tracker["constants"]["masterdir"], "rsort%d"%iter_rsort, "index_for_Kmeans.txt"))
line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
msg = "Include %d unaccounted as one cluster "%len(Tracker["unaccounted_list"])
print(line, msg)
log_main.add(msg)
iter_rsort +=1
mpi_barrier(MPI_COMM_WORLD)
if(Blockdata["myid"] == Blockdata["main_node"]):
Tracker["output"].append("----->>>>reproducible ratio of two RSORTS<<<<-------")
line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
tlist = read_text_file(os.path.join(Tracker["constants"]["masterdir"],"indexes.txt"))
ptp = []
for irsort in xrange(2):
r1 = read_text_row(os.path.join(Tracker["constants"]["masterdir"], "rsort%d"%irsort, "index_for_Kmeans.txt"))
merged_classes = split_partition_into_clusters([r1])
sptp = prep_ptp_single(merged_classes, tlist)
ptp.append(sptp)
accounted_list, unaccounted_list, new_index, rsort_clusters = do_two_way_comparison_classes(ptp[0], ptp[1], len(tlist))
msg = " The final selected particles by two rsorts: %d percentage: %5.2f "%(len(accounted_list), float(len(accounted_list))/float(total_rsort)*100.0)
print(line, msg)
log_main.add(msg)
Tracker["output"].append(msg)
for icluster in xrange(main_number_of_clusters, len(rsort_clusters) + main_number_of_clusters):
Tracker["output"].append("Cluster %d members %d"%(icluster, len(rsort_clusters[icluster - main_number_of_clusters])))
write_text_file(rsort_clusters[icluster - main_number_of_clusters], os.path.join(Tracker["constants"]["masterdir"],"Cluster_%03d.txt"%icluster))
Tracker["number_of_groups"] = len(rsort_clusters)
Tracker["output"].append("----->>>>> RSORT is done<<<<<--------")
write_text_file(Tracker["output"], os.path.join(Tracker["constants"]["masterdir"], "final.txt"))
else:Tracker = 0
Tracker = wrap_mpi_bcast(Tracker, Blockdata["main_node"], MPI_COMM_WORLD)
mpi_barrier(MPI_COMM_WORLD)
if(Blockdata["myid"] == Blockdata["main_node"]):
clusters_list = []
number_of_groups = 0
while os.path.exists(os.path.join(Tracker["constants"]["masterdir"], "Cluster_%03d.txt"%number_of_groups)):
clusters_list.append(read_text_file(os.path.join(Tracker["constants"]["masterdir"], "Cluster_%03d.txt"%number_of_groups)))
number_of_groups +=1
final_partition, tmp_index = merge_classes_into_partition_list(clusters_list)
write_text_row(tmp_index, os.path.join(Tracker["constants"]["masterdir"], "final_partition.txt"))
fout = open(os.path.join(Tracker["constants"]["masterdir"], "Tracker.json"),'w')
json.dump(Tracker, fout)
fout.close()
mpi_barrier(MPI_COMM_WORLD)
### Final Rec3D unfiltered two halves, valid only in case of sorting initiated from sphire refinement
if Tracker["constants"]["final_sharpen"]:
Tracker["constants"]["orgres"] = 0.0
Tracker["constants"]["refinement_delta"] = 0.0
Tracker["constants"]["refinement_ts"] = 0.0
Tracker["constants"]["refinement_xr"] = 0.0
Tracker["constants"]["refinement_an"] = 0.0
minimum_size = Tracker["constants"]["img_per_grp"]
number_of_groups = 0
if(Blockdata["myid"] == Blockdata["main_node"]):
final_accounted_ptl = 0
line = strftime("%Y-%m-%d_%H:%M:%S", localtime()) + " =>"
msg = "---->>> Summaries of the final results <<<-----"
print(line, msg)
Tracker["output"].append(msg)
while os.path.exists(os.path.join(Tracker["constants"]["masterdir"], "Cluster_%03d.txt"%number_of_groups)):
class_in = read_text_file(os.path.join(Tracker["constants"]["masterdir"], "Cluster_%03d.txt"%number_of_groups))
minimum_size = min(len(class_in), minimum_size)
msg = " %10d clusters %10d group size "%(number_of_groups, len(class_in))
print(line, msg)
Tracker["output"].append(msg)
number_of_groups += 1
final_accounted_ptl +=len(class_in)
del class_in
msg = "total number of particle images: %10d; accounted: %10d ; number_of_groups: %5d"%(Tracker["constants"]["total_stack"], final_accounted_ptl, number_of_groups)
print(line, msg)
Tracker["output"].append(msg)
else: Tracker = 0
number_of_groups = bcast_number_to_all(number_of_groups, Blockdata["main_node"], MPI_COMM_WORLD)
Tracker = wrap_mpi_bcast(Tracker, Blockdata["main_node"], MPI_COMM_WORLD)
if number_of_groups == 0:ERROR("No cluster is found, and the program terminates.", "do_final_maps", 1, Blockdata["myid"])
minimum_size = bcast_number_to_all(minimum_size, Blockdata["main_node"], MPI_COMM_WORLD)
compute_noise(Tracker["constants"]["nnxo"])
do_final_maps(number_of_groups, minimum_size, Tracker["constants"]["selected_iter"], Tracker["constants"]["refinement_dir"], \
Tracker["constants"]["masterdir"], Tracker["constants"]["nnxo"], log_main)
mpi_barrier(MPI_COMM_WORLD)
if(Blockdata["myid"] == Blockdata["main_node"]):
fout = open(os.path.join(Tracker["constants"]["masterdir"], "Tracker.json"),'r')
Tracker = convert_json_fromunicode(json.load(fout))
fout.close()
for iproc in xrange(number_of_groups):
msg = merge_two_unfiltered_maps(os.path.join(Tracker["constants"]["masterdir"], "vol_unfiltered_0_grp%03d.hdf"%iproc), \
os.path.join(Tracker["constants"]["masterdir"], "vol_unfiltered_1_grp%03d.hdf"%iproc), iproc)
Tracker["output"].append(msg)
write_text_file(Tracker["output"], os.path.join(Tracker["constants"]["masterdir"], "final.txt"))
fout = open(os.path.join(Tracker["constants"]["masterdir"], "Tracker.json"),'w')
json.dump(Tracker, fout)
fout.close()
mpi_barrier(MPI_COMM_WORLD)
from mpi import mpi_finalize
mpi_finalize()
exit()
if __name__ == "__main__":
main() sphire_beta20170901_patch20170906/src/eman2/sparx/bin/sxlocres.py 0000775 0030616 0076400 00000021534 13153776461 024664 0 ustar stabrin Domain Users #!/usr/bin/env python
#
# Author: Pawel A.Penczek and Edward H. Egelman 05/27/2009 (Pawel.A.Penczek@uth.tmc.edu)
# Copyright (c) 2000-2006 The University of Texas - Houston Medical School
# Copyright (c) 2008-Forever The University of Virginia
#
# This software is issued under a joint BSD/GNU license. You may use the
# source code in this file under either license. However, note that the
# complete EMAN2 and SPARX software packages have some GPL dependencies,
# so you are responsible for compliance with the licenses of these packages
# if you opt to use BSD licensing. The warranty disclaimer below holds
# in either instance.
#
# This complete copyright notice must be included in any revised version of the
# source code. Additional authorship citations may be added, but existing
# author citations must be preserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
#
import global_def
from global_def import *
from EMAN2 import *
from sparx import *
from global_def import SPARX_MPI_TAG_UNIVERSAL
#Transforms the local resolution file from frequency units to angstroms.
def makeAngRes(freqvol, nx, ny, nz, pxSize):
	"""Convert a local-resolution volume from absolute frequency units to Angstroms.

	Every nonzero voxel v of freqvol is mapped to pxSize / v; voxels that are
	exactly zero (unresolved regions) are left at zero.  A brand-new EMData
	volume of size nx x ny x nz is returned; freqvol itself is not modified.
	"""
	if pxSize == 1.0:
		# A pixel size of exactly 1.0 is usually the unset default, so warn.
		print("Using a value of 1 for the pixel size. Are you sure this is correct?")
	angResVol = EMData()
	angResVol.set_size(nx, ny, nz)
	# Walk every voxel of the output grid and convert in place.
	for ix in range(nx):
		for iy in range(ny):
			for iz in range(nz):
				v = freqvol[ix, iy, iz]
				angResVol[ix, iy, iz] = 0 if v == 0 else pxSize / v
	return angResVol
def main():
import os
import sys
from optparse import OptionParser
arglist = []
for arg in sys.argv:
arglist.append( arg )
progname = os.path.basename(arglist[0])
usage = progname + """ firstvolume secondvolume maskfile outputfile --wn --step --cutoff --radius --fsc --res_overall --out_ang_res --apix --MPI
Compute local resolution in real space within area outlined by the maskfile and within regions wn x wn x wn
"""
parser = OptionParser(usage,version=SPARXVERSION)
parser.add_option("--wn", type="int", default=7, help="Size of window within which local real-space FSC is computed. (default 7)")
parser.add_option("--step", type="float", default= 1.0, help="Shell step in Fourier size in pixels. (default 1.0)")
parser.add_option("--cutoff", type="float", default= 0.5, help="Resolution cut-off for FSC. (default 0.5)")
parser.add_option("--radius", type="int", default=-1, help="If there is no maskfile, sphere with r=radius will be used. By default, the radius is nx/2-wn (default -1)")
parser.add_option("--fsc", type="string", default= None, help="Save overall FSC curve (might be truncated). By default, the program does not save the FSC curve. (default none)")
parser.add_option("--res_overall", type="float", default= -1.0, help="Overall resolution estimated by the user [abs units]. (default None)")
parser.add_option("--out_ang_res", action="store_true", default=False, help="Additionally creates a local resolution file in Angstroms. (default False)")
parser.add_option("--apix", type="float", default= 1.0, help="Pixel size in Angstrom. Effective only with --out_ang_res options. (default 1.0)")
parser.add_option("--MPI", action="store_true", default=False, help="Use MPI version.")
(options, args) = parser.parse_args(arglist[1:])
if len(args) <3 or len(args) > 4:
print "See usage " + usage
sys.exit()
if global_def.CACHE_DISABLE:
from utilities import disable_bdb_cache
disable_bdb_cache()
res_overall = options.res_overall
if options.MPI:
from mpi import mpi_init, mpi_comm_size, mpi_comm_rank, MPI_COMM_WORLD
from mpi import mpi_reduce, mpi_bcast, mpi_barrier, mpi_gatherv, mpi_send, mpi_recv
from mpi import MPI_SUM, MPI_FLOAT, MPI_INT
sys.argv = mpi_init(len(sys.argv),sys.argv)
number_of_proc = mpi_comm_size(MPI_COMM_WORLD)
myid = mpi_comm_rank(MPI_COMM_WORLD)
main_node = 0
global_def.MPI = True
cutoff = options.cutoff
nk = int(options.wn)
if(myid == main_node):
#print sys.argv
vi = get_im(sys.argv[1])
ui = get_im(sys.argv[2])
nx = vi.get_xsize()
ny = vi.get_ysize()
nz = vi.get_zsize()
dis = [nx, ny, nz]
else:
dis = [0,0,0,0]
global_def.BATCH = True
dis = bcast_list_to_all(dis, myid, source_node = main_node)
if(myid != main_node):
nx = int(dis[0])
ny = int(dis[1])
nz = int(dis[2])
vi = model_blank(nx,ny,nz)
ui = model_blank(nx,ny,nz)
if len(args) == 3:
m = model_circle((min(nx,ny,nz)-nk)//2,nx,ny,nz)
outvol = args[2]
elif len(args) == 4:
if(myid == main_node):
m = binarize(get_im(args[2]), 0.5)
else:
m = model_blank(nx, ny, nz)
outvol = args[3]
bcast_EMData_to_all(m, myid, main_node)
from statistics import locres
"""
res_overall = 0.5
if myid ==main_node:
fsc_curve = fsc(vi, ui)
for ifreq in xrange(len(fsc_curve[0])-1, -1, -1):
if fsc_curve[1][ifreq] > options.cutoff:
res_overall = fsc_curve[0][ifreq]
break
res_overall = bcast_number_to_all(res_overall, main_node)
"""
freqvol, resolut = locres(vi, ui, m, nk, cutoff, options.step, myid, main_node, number_of_proc)
if(myid == 0):
if res_overall !=-1.0:
freqvol += (res_overall- Util.infomask(freqvol, m, True)[0])
for ifreq in xrange(len(resolut)):
if resolut[ifreq][0] >res_overall:
break
for jfreq in xrange(ifreq, len(resolut)):
resolut[jfreq][1] = 0.0
freqvol.write_image(outvol)
if(options.out_ang_res):
outAngResVolName = os.path.splitext(outvol)[0] + "_ang.hdf"
outAngResVol = makeAngRes(freqvol, nx, ny, nz, options.apix)
outAngResVol.write_image(outAngResVolName)
if(options.fsc != None): write_text_row(resolut, options.fsc)
from mpi import mpi_finalize
mpi_finalize()
else:
cutoff = options.cutoff
vi = get_im(args[0])
ui = get_im(args[1])
nn = vi.get_xsize()
nk = int(options.wn)
if len(args) == 3:
m = model_circle((nn-nk)//2,nn,nn,nn)
outvol = args[2]
elif len(args) == 4:
m = binarize(get_im(args[2]), 0.5)
outvol = args[3]
mc = model_blank(nn,nn,nn,1.0)-m
vf = fft(vi)
uf = fft(ui)
"""
res_overall = 0.5
fsc_curve = fsc(vi, ui)
for ifreq in xrange(len(fsc_curve[0])-1, -1, -1):
if fsc_curve[1][ifreq] > options.cutoff:
res_overall = fsc_curve[0][ifreq]
break
"""
lp = int(nn/2/options.step+0.5)
step = 0.5/lp
freqvol = model_blank(nn,nn,nn)
resolut = []
for i in xrange(1,lp):
fl = step*i
fh = fl+step
print lp,i,step,fl,fh
v = fft(filt_tophatb( vf, fl, fh))
u = fft(filt_tophatb( uf, fl, fh))
tmp1 = Util.muln_img(v,v)
tmp2 = Util.muln_img(u,u)
do = Util.infomask(square_root(threshold(Util.muln_img(tmp1,tmp2))),m,True)[0]
tmp3 = Util.muln_img(u,v)
dp = Util.infomask(tmp3,m,True)[0]
resolut.append([i,(fl+fh)/2.0, dp/do])
tmp1 = Util.box_convolution(tmp1, nk)
tmp2 = Util.box_convolution(tmp2, nk)
tmp3 = Util.box_convolution(tmp3, nk)
Util.mul_img(tmp1,tmp2)
tmp1 = square_root(threshold(tmp1))
Util.mul_img(tmp1,m)
Util.add_img(tmp1,mc)
Util.mul_img(tmp3,m)
Util.add_img(tmp3,mc)
Util.div_img(tmp3,tmp1)
Util.mul_img(tmp3,m)
freq=(fl+fh)/2.0
bailout = True
for x in xrange(nn):
for y in xrange(nn):
for z in xrange(nn):
if(m.get_value_at(x,y,z) > 0.5):
if(freqvol.get_value_at(x,y,z) == 0.0):
if(tmp3.get_value_at(x,y,z) < cutoff):
freqvol.set_value_at(x,y,z, freq)
bailout = False
else:
bailout = False
if(bailout): break
print len(resolut)
if res_overall !=-1.0:
freqvol += (res_overall- Util.infomask(freqvol, m, True)[0])
for ifreq in xrange(len(resolut)):
if resolut[ifreq][1] >res_overall:
break
for jfreq in xrange(ifreq, len(resolut)):
resolut[jfreq][2] = 0.0
freqvol.write_image(outvol)
if(options.out_ang_res):
outAngResVolName = os.path.splitext(outvol)[0] + "_ang.hdf"
outAngResVol = makeAngRes(freqvol, nn, nn, nn, options.apix)
outAngResVol.write_image(outAngResVolName)
if(options.fsc != None): write_text_row(resolut, options.fsc)
# Script entry point: run main() only when executed directly, not on import.
if __name__ == "__main__":
	main()
sphire_beta20170901_patch20170906/install_sxpatch.py 0000775 0030616 0076400 00000026071 13154017623 022512 0 ustar stabrin Domain Users #!/usr/bin/env python
#
# Author: Toshio Moriya 09/06/2017 (toshio.moriya@mpi-dortmund.mpg.de)
#
# This software is issued under a joint BSD/GNU license. You may use the
# source code in this file under either license. However, note that the
# complete SPHIRE and EMAN2 software packages have some GPL dependencies,
# so you are responsible for compliance with the licenses of these packages
# if you opt to use BSD licensing. The warranty disclaimer below holds
# in either instance.
#
# This complete copyright notice must be included in any revised version of the
# source code. Additional authorship citations may be added, but existing
# author citations must be preserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# ========================================================================================
# Imports
# ========================================================================================
# Python Standard Libraries
from __future__ import print_function
import sys
import os
import argparse
# ========================================================================================
# Helper Functions
# ========================================================================================
# ----------------------------------------------------------------------------------------
# Generate command line
# ----------------------------------------------------------------------------------------
def get_cmd_line():
	"""Reconstruct and return the shell command line used to invoke this script."""
	joined_argv = " ".join(sys.argv)
	return "Shell line command is '%s'" % joined_argv.strip()
# ----------------------------------------------------------------------------------------
# Print progress message with time stamp
# ----------------------------------------------------------------------------------------
def print_progress(message):
	"""Print *message* to stdout, prefixed with a local-time stamp."""
	from time import localtime, strftime
	stamp = strftime("%Y-%m-%d_%H:%M:%S", localtime())
	print("%s => %s" % (stamp, message))
# ----------------------------------------------------------------------------------------
# Generate backup root directory name with current local time
# ----------------------------------------------------------------------------------------
def get_backup_root_dir_name():
	"""Return a backup root directory name stamped with the current local time."""
	from time import localtime, strftime
	stamp_format = "backup_%Y%m%d_%H%M%S"
	return strftime(stamp_format, localtime())
# ----------------------------------------------------------------------------------------
# Do the actual installation
# ----------------------------------------------------------------------------------------
def install_patch_src_files(patch_src_root_dir_path, patch_src_subdir_rpath, backup_root_dir_path, install_dir_path, install_subdir_rpath, python_script_directive=None):
	"""
	Copy the patch Python scripts found under one patch source subdirectory
	into the corresponding SPHIRE installation subdirectory.

	For every *.py file found in patch_src_root_dir_path/patch_src_subdir_rpath:
	- the currently-installed file (if any) is first backed up under
	  backup_root_dir_path, preserving the patch-relative directory layout;
	- the patch file is copied to install_dir_path/install_subdir_rpath;
	- when python_script_directive is given, the first line of the installed
	  copy is replaced by that directive (e.g. '#!<install_dir>/bin/python');
	  otherwise the file is copied verbatim with metadata;
	- the installed file has the three execute bits added to its mode.

	Arguments:
		patch_src_root_dir_path  : root directory of the unpacked patch sources
		patch_src_subdir_rpath   : subdirectory (relative to the root) holding the patch files
		backup_root_dir_path     : root directory receiving backups of replaced files
		install_dir_path         : SPHIRE installation directory
		install_subdir_rpath     : installation subdirectory (relative to install_dir_path)
		python_script_directive  : replacement first line for installed scripts, or None to copy as-is

	Returns None. Progress and warnings are reported through print_progress().
	"""
	import glob
	import shutil
	print_progress("Checking \'%s\' subdirectory..." % patch_src_subdir_rpath)
	patch_src_subdir_rpath = os.path.join(patch_src_root_dir_path, patch_src_subdir_rpath)
	if not os.path.exists(patch_src_subdir_rpath):
		print_progress("Nothing to do..." )
		return
	print_progress("Making the list of patch files in \'%s\'..." % patch_src_subdir_rpath)
	patch_src_file_path_pattern = os.path.join(patch_src_subdir_rpath, "*.py")
	patch_src_file_path_list = glob.glob(patch_src_file_path_pattern)
	print_progress("Found %d Python script files in \'%s\'." % (len(patch_src_file_path_list), patch_src_subdir_rpath))
	if not patch_src_file_path_list:
		print_progress("Nothing to do..." )
		return
	backup_subdir_path = os.path.join(backup_root_dir_path, patch_src_subdir_rpath)
	if not os.path.exists(backup_subdir_path):
		print_progress("Making backup subdirectory \'%s\'..." % backup_subdir_path)
		os.makedirs(backup_subdir_path)
	assert(os.path.exists(backup_subdir_path))
	install_subdir_path = os.path.join(install_dir_path, install_subdir_rpath)
	for patch_src_file_path in patch_src_file_path_list:
		patch_file_name = os.path.basename(patch_src_file_path)
		install_file_path = os.path.join(install_subdir_path, patch_file_name)
		backup_file_path = os.path.join(backup_subdir_path, patch_file_name)
		print_progress("Replacing \'%s\' with \'%s\'..." % (install_file_path, patch_src_file_path))
		# Backup the originally-installed file before it is overwritten.
		if os.path.exists(install_file_path):
			shutil.copy2(install_file_path, backup_file_path)
			# Check the consistency of the Python script directive only when one
			# was requested; comparing the first line against None would emit a
			# spurious WARNING for every file during a no-directive (lib) install.
			if python_script_directive is not None:
				with open(install_file_path, "r") as install_file:
					install_1st_line = install_file.readline().strip()
				if install_1st_line != python_script_directive:
					print_progress("WARNING!!! Found unexpected Python script directive \'%s\' in \'%s\'. However, this should not cause any problems..." % (install_1st_line, install_file_path))
		else:
			print_progress("%s does not exist in \'%s\'. The program will not make the backup..." % (patch_file_name, install_subdir_path))
		# Copy the patch file to the install subdirectory.
		if python_script_directive is not None:
			# Overwrite the Python script directive (first line) on the way in.
			with open(patch_src_file_path, "r") as patch_src_file:
				patch_src_1st_line = patch_src_file.readline().strip()
				patch_src_remainder_lines = patch_src_file.read()
			if patch_src_1st_line != "#!/usr/bin/env python":
				print_progress("WARNING!!! Found unexpected Python script directive \'%s\' in \'%s\'. However, this should not cause any problems..." % (patch_src_1st_line, patch_src_file_path))
			with open(install_file_path, "w") as install_file:
				install_file.write(python_script_directive + "\n")
				install_file.write(patch_src_remainder_lines)
		else:
			# No directive rewrite requested; a plain copy2 also preserves metadata.
			shutil.copy2(patch_src_file_path, install_file_path)
		# Make the installed script executable for user/group/other.
		# 0o111 adds the three execute bits to the existing mode.
		install_file_stat = os.stat(install_file_path)
		os.chmod(install_file_path, install_file_stat.st_mode | 0o111)
# ========================================================================================
# Command functions
# ========================================================================================
# ----------------------------------------------------------------------------------------
# TEST COMMAND
# cd /home/moriya/mrk_develop/sxdemo/sxdemo07_20160908/mpi_bdb_ctf
# rm -r mrkout_sxpipe_isac_substack; sxpipe.py isac_substack bdb:beta20161216_pa03b_sxwindow01#data beta20161216_pa04a_sxisac01/class_averages.hdf mrkout_sxpipe_isac_substack
#
# ----------------------------------------------------------------------------------------
def install_sxpatch(args):
	"""
	Install the patch: locate the SPHIRE installation directory (either from
	args.install_dir_path or by resolving the 'sphire' executable), then copy
	the patched bin/ and lib/ Python scripts into it via install_patch_src_files().
	"""
	from distutils.spawn import find_executable
	install_dir_path = args.install_dir_path
	if install_dir_path is None:
		# Auto-detect the installation from the location of the 'sphire' GUI:
		# <install_dir>/bin/sphire -> install_dir is two levels up.
		sphire_gui_abs_path = find_executable("sphire")
		if sphire_gui_abs_path is None:
			print_progress("Program can not find the SPHIRE installation directory. Please manually specify the directory using --install_dir_path options.")
			return
		install_dir_path = os.path.dirname(os.path.dirname(sphire_gui_abs_path))
	elif not os.path.exists(install_dir_path):
		print_progress("Provided SPHIRE installation directory does not exist. Please check the directory path provided with --install_dir_path options.")
		return
	assert(os.path.exists(install_dir_path))
	print_progress("SPHIRE installation directory is \'%s\'" % (install_dir_path))
	print(" ")
	# Layout constants for this patch release.
	patch_src_root_dir_path = "src"
	backup_root_dir_path = get_backup_root_dir_name()
	# Installed scripts get a shebang pointing at the installation's own Python,
	# e.g. '#!/work/software/sphire/beta_20170901/EMAN2/bin/python'.
	python_script_directive = '#!' + os.path.join(install_dir_path, "bin/python")
	install_patch_src_files(patch_src_root_dir_path, "eman2/sparx/bin", backup_root_dir_path, install_dir_path, "bin", python_script_directive)
	print(" ")
	install_patch_src_files(patch_src_root_dir_path, "eman2/sparx/lib", backup_root_dir_path, install_dir_path, "lib")
	print(" ")
# ========================================================================================
# Main function
# ========================================================================================
def main():
	"""Parse the command line and run the patch-installation command."""
	parser = argparse.ArgumentParser(description='Python script to install patch for a SPHIRE release.')
	parser.add_argument('--version', action='version', version="Version 0.0.0.0")
	parser.add_argument('--install_dir_path', type=str, default=None, help='Specify path to SPHIRE installation directory. By default, the program try to detect it automatically by extracting the path from \'which sphire\'. (default none)')
	# Register the handler so args.func dispatches to the installer below.
	parser.set_defaults(func=install_sxpatch)
	parsed_args = parser.parse_args()
	# Echo the invoking command line for the log, then run the command.
	print_progress(get_cmd_line())
	print(" ")
	parsed_args.func(parsed_args)
	print_progress("DONE!!!")
	print(" ")
# ----------------------------------------------------------------------------------------
# Script entry point: run main() only when executed directly, not on import.
if __name__ == '__main__':
	main()
# ========================================================================================
# END OF SCRIPT
# ========================================================================================