signature
stringlengths
29
44.1k
implementation
stringlengths
0
85.2k
def wait_until_page_contains_elements ( self , timeout , * locators ) : """This is a copy of ` Wait Until Page Contains Element ` but it allows multiple arguments in order to wait for more than one element . | * Argument * | * Description * | * Example * | | timeout | maximum time to wait , if set to $ { None...
self . _wait_until_no_error ( timeout , self . _wait_for_elements , locators )
def __loaddate ( ) : '''載入檔案 檔案依據 http : / / www . twse . com . tw / ch / trading / trading _ days . php'''
csv_path = os . path . join ( os . path . dirname ( __file__ ) , 'opendate.csv' ) with open ( csv_path ) as csv_file : csv_data = csv . reader ( csv_file ) result = { } result [ 'close' ] = [ ] result [ 'open' ] = [ ] for i in csv_data : if i [ 1 ] == '0' : # 0 = 休市 result [ 'clo...
def main ( ) : """Shows useful information about how - to configure alias on a first run and configure automatically on a second . It ' ll be only visible when user type fuck and when alias isn ' t configured ."""
settings . init ( ) configuration_details = shell . how_to_configure ( ) if ( configuration_details and configuration_details . can_configure_automatically ) : if _is_already_configured ( configuration_details ) : logs . already_configured ( configuration_details ) return elif _is_second_run ( )...
def _createFromObject ( obj , * args , ** kwargs ) : """Creates an RTI given an object . Auto - detects which RTI class to return . The * args and * * kwargs parameters are passed to the RTI constructor . It is therefor important that all memory RTIs accept the same parameters in the constructor ( with except...
if is_a_sequence ( obj ) : return SequenceRti ( obj , * args , ** kwargs ) elif is_a_mapping ( obj ) : return MappingRti ( obj , * args , ** kwargs ) elif is_an_array ( obj ) : return ArrayRti ( obj , * args , ** kwargs ) elif isinstance ( obj , bytearray ) : return ArrayRti ( np . array ( obj ) , * arg...
def save_scatter_table ( self , fn , description = "" ) : """Save the scattering lookup tables . Save the state of the scattering lookup tables to a file . This can be loaded later with load _ scatter _ table . Other variables will not be saved , but this does not matter because the results of the computati...
data = { "description" : description , "time" : datetime . now ( ) , "psd_scatter" : ( self . num_points , self . D_max , self . _psd_D , self . _S_table , self . _Z_table , self . _angular_table , self . _m_table , self . geometries ) , "version" : tmatrix_aux . VERSION } pickle . dump ( data , file ( fn , 'w' ) , pic...
def set_statics ( self ) : """Create statics directory and copy files in it"""
if not os . path . exists ( self . results_dir ) : return None try : shutil . copytree ( os . path . join ( self . templates_dir , 'css' ) , os . path . join ( self . results_dir , 'css' ) ) shutil . copytree ( os . path . join ( self . templates_dir , 'scripts' ) , os . path . join ( self . results_dir , '...
def psetex(self, name, value, time_ms):
    """Set key ``name`` to ``value`` with a millisecond expiry.

    ``time_ms`` may be an int or a Python ``timedelta``; it is forwarded
    unchanged to the underlying redis PSETEX call.
    """
    with self.pipe as pipe:
        encoded = self.valueparse.encode(value=value)
        return pipe.psetex(self.redis_key(name), time_ms=time_ms, value=encoded)
def rank(matrix, atol=1e-13, rtol=0):
    """Estimate the rank (dimension of the column space) of a matrix.

    Based on the singular value decomposition: singular values of at least
    ``max(atol, rtol * sigma_max)`` are counted as nonzero.

    Parameters
    ----------
    matrix : array_like
        At most 2-D; 1-D input is promoted to a single row.
    atol : float
        Absolute tolerance for a singular value to count as nonzero.
    rtol : float
        Relative tolerance, scaled by the largest singular value.

    Returns
    -------
    int
        The estimated rank.
    """
    matrix_2d = np.atleast_2d(matrix)
    singular_values = svd(matrix_2d, compute_uv=False)
    threshold = max(atol, rtol * singular_values[0])
    return int((singular_values >= threshold).sum())
def present ( name , value , zone , record_type , ttl = None , identifier = None , region = None , key = None , keyid = None , profile = None , wait_for_sync = True , split_dns = False , private_zone = False ) : '''Ensure the Route53 record is present . name Name of the record . value Value of the record . ...
ret = { 'name' : name , 'result' : True , 'comment' : '' , 'changes' : { } } # If a list is passed in for value , change it to a comma - separated string # So it will work with subsequent boto module calls and string functions if isinstance ( value , list ) : value = ',' . join ( value ) elif value . startswith ( '...
def _supported_imts ( self ) : """Updates the list of supported IMTs from the tables"""
imt_list = [ ] for key in self . imls : if "SA" in key : imt_list . append ( imt_module . SA ) elif key == "T" : continue else : try : factory = getattr ( imt_module , key ) except Exception : continue imt_list . append ( factory ) return imt_l...
def from_inline ( cls : Type [ CertificationType ] , version : int , currency : str , blockhash : Optional [ str ] , inline : str ) -> CertificationType : """Return Certification instance from inline document Only self . pubkey _ to is populated . You must populate self . identity with an Identity instance to u...
cert_data = Certification . re_inline . match ( inline ) if cert_data is None : raise MalformedDocumentError ( "Certification ({0})" . format ( inline ) ) pubkey_from = cert_data . group ( 1 ) pubkey_to = cert_data . group ( 2 ) blockid = int ( cert_data . group ( 3 ) ) if blockid == 0 or blockhash is None : ti...
def _maybe_update(self, user, attribute, new_value):
    """DRY helper: update ``attribute`` on ``user`` only when it differs."""
    current = getattr(user, attribute)
    if new_value == current:
        return
    message = _('Setting {attribute} for user "{username}" to "{new_value}"')
    self.stderr.write(message.format(
        attribute=attribute, username=user.username, new_value=new_value))
    setattr(user, attribute, new_value)
def make_authentiq_blueprint ( client_id = None , client_secret = None , scope = "openid profile" , redirect_url = None , redirect_to = None , login_url = None , authorized_url = None , session_class = None , storage = None , hostname = "connect.authentiq.io" , ) : """Make a blueprint for authenticating with authen...
authentiq_bp = OAuth2ConsumerBlueprint ( "authentiq" , __name__ , client_id = client_id , client_secret = client_secret , scope = scope , base_url = "https://{hostname}/" . format ( hostname = hostname ) , authorization_url = "https://{hostname}/authorize" . format ( hostname = hostname ) , token_url = "https://{hostna...
def new_message ( cls , from_user , to_users , subject , content ) : """Create a new Message and Thread . Mark thread as unread for all recipients , and mark thread as read and deleted from inbox by creator ."""
thread = Thread . objects . create ( subject = subject ) for user in to_users : thread . userthread_set . create ( user = user , deleted = False , unread = True ) thread . userthread_set . create ( user = from_user , deleted = True , unread = False ) msg = cls . objects . create ( thread = thread , sender = from_us...
def _selectView ( self ) : """Matches the view selection to the trees selection ."""
scene = self . uiGanttVIEW . scene ( ) scene . blockSignals ( True ) scene . clearSelection ( ) for item in self . uiGanttTREE . selectedItems ( ) : item . viewItem ( ) . setSelected ( True ) scene . blockSignals ( False ) curr_item = self . uiGanttTREE . currentItem ( ) vitem = curr_item . viewItem ( ) if vitem : ...
def send(self, data):
    """Keep trying to send a message over the websocket.

    Relies on the run method to reopen the websocket when it is closed;
    retries until the send succeeds or the worker is stopped.
    """
    while not self.stopped():
        try:
            self.ws.send(data)
        except websocket.WebSocketConnectionClosedException:
            # TODO(investigate infinite loop)
            time.sleep(0.1)
        else:
            return
def similarity(state_a, state_b):
    """Similarity of two states based on their basic-block address histories.

    Computes the L2 distance between the per-address execution counts of the
    two histories and maps it into (0, 1] via ``1 / (1 + distance)``.

    :param state_a: The first state to compare
    :param state_b: The second state to compare
    """
    counts_a = Counter(state_a.history.bbl_addrs)
    counts_b = Counter(state_b.history.bbl_addrs)
    # Counter lookups of missing keys return 0, so plain indexing is safe.
    all_addrs = set(counts_a) | set(counts_b)
    squared_diff = sum((counts_a[addr] - counts_b[addr]) ** 2 for addr in all_addrs)
    return 1.0 / (1 + squared_diff ** 0.5)
def set_border_style_type(self, border_style_type):
    """Set the border style using the specified border style type.

    ``border_style_type`` should be an integer value recognized by the
    border style factory for this formatter instance.

    Returns self, so calls can be chained.
    """
    new_style = self.__border_style_factory.create_border(border_style_type)
    self.set_border_style(new_style)
    return self
def authenticate ( remote_addr , password , cert , key , verify_cert = True ) : '''Authenticate with a remote LXDaemon . remote _ addr : An URL to a remote Server , you also have to give cert and key if you provide remote _ addr and its a TCP Address ! Examples : https : / / myserver . lan : 8443 passwo...
client = pylxd_client_get ( remote_addr , cert , key , verify_cert ) if client . trusted : return True try : client . authenticate ( password ) except pylxd . exceptions . LXDAPIException as e : # Wrong password raise CommandExecutionError ( six . text_type ( e ) ) return client . trusted
def _last(self, **spec):
    """Get the latest entry in this category, optionally including subcategories"""
    newest_first = self._entries(spec).order_by(
        orm.desc(model.Entry.local_date),
        orm.desc(model.Entry.id))
    # Take at most one record; the loop body runs zero or one times.
    for record in newest_first[:1]:
        return entry.Entry(record)
    return None
def add(self, child):
    """Adds a typed child object to the component type.

    @param child: Child object to be added.
    """
    if not isinstance(child, FatComponent):
        Fat.add(self, child)
    else:
        self.add_child_component(child)
def delete_checkpoint(self, checkpoint_dir):
    """Removes the subdirectory holding a checkpoint.

    Parameters
    ----------
    checkpoint_dir : str
        Path to a checkpoint file or to its containing directory.  When a
        file path is given, the parent directory is removed instead.
    """
    target = checkpoint_dir
    if os.path.isfile(target):
        target = os.path.dirname(target)
    shutil.rmtree(target)
def identifier(self):
    """Server-generated identifier for this model.

    Returns ``'Unknown'`` when the primary key is not yet in the local data
    cache, i.e. the server has not generated it yet.
    """
    try:
        return str(self._data[self.primary_key])
    except KeyError:
        return 'Unknown'
def parse ( query_string , info = { } ) : """: returns : a normalized query _ dict as in the following examples : > > > parse ( ' kind = stats ' , { ' stats ' : { ' mean ' : 0 , ' max ' : 1 } } ) { ' kind ' : [ ' mean ' , ' max ' ] , ' k ' : [ 0 , 1 ] , ' rlzs ' : False } > > > parse ( ' kind = rlzs ' , { ' s...
qdic = parse_qs ( query_string ) loss_types = info . get ( 'loss_types' , [ ] ) for key , val in qdic . items ( ) : # for instance , convert site _ id to an int if key == 'loss_type' : qdic [ key ] = [ loss_types [ k ] for k in val ] else : qdic [ key ] = [ lit_eval ( v ) for v in val ] if info ...
def dispatch(self):
    """Wraps the dispatch method to add session support."""
    try:
        # Delegate to webapp2's normal request dispatching.
        webapp2.RequestHandler.dispatch(self)
    finally:
        # Persist session changes even when the handler raised.
        self.session_store.save_sessions(self.response)
def typechecked_func ( func , force = False , argType = None , resType = None , prop_getter = False ) : """Works like typechecked , but is only applicable to functions , methods and properties ."""
if not pytypes . checking_enabled and not pytypes . do_logging_in_typechecked : return func assert ( _check_as_func ( func ) ) if not force and is_no_type_check ( func ) : return func if hasattr ( func , 'do_typecheck' ) : func . do_typecheck = True return func elif hasattr ( func , 'do_logging' ) : # a...
def cal_gpa ( grades ) : """根据成绩数组计算课程平均绩点和 gpa , 算法不一定与学校一致 , 结果仅供参考 : param grades : : meth : ` models . StudentSession . get _ my _ achievements ` 返回的成绩数组 : return : 包含了课程平均绩点和 gpa 的元组"""
# 课程总数 courses_sum = len ( grades ) # 课程绩点和 points_sum = 0 # 学分和 credit_sum = 0 # 课程学分 x 课程绩点之和 gpa_points_sum = 0 for grade in grades : point = get_point ( grade . get ( '补考成绩' ) or grade [ '成绩' ] ) credit = float ( grade [ '学分' ] ) points_sum += point credit_sum += credit gpa_points_sum += credit ...
def on_state_changed(self, state):
    """Connect/disconnect the ``sig_key_pressed`` signal handler."""
    signal = self.editor.sig_key_pressed
    if state:
        signal.connect(self._on_key_pressed)
    else:
        signal.disconnect(self._on_key_pressed)
def main ( ) : """Program entry point"""
parser = argparse . ArgumentParser ( ) parser . add_argument ( "path" , help = "Path to the CAPTCHA image file" ) parser . add_argument ( "--prefix" , help = "Checkpoint prefix [Default 'ocr']" , default = 'ocr' ) parser . add_argument ( "--epoch" , help = "Checkpoint epoch [Default 100]" , type = int , default = 100 )...
def getParser ( ) : "Creates and returns the argparse parser object ."
parser = argparse . ArgumentParser ( description = __description__ , formatter_class = argparse . RawDescriptionHelpFormatter ) parser . add_argument ( 'images' , nargs = '+' , help = 'The images used for training (in the learning case) or to transform (in the transformation case)' ) apply_group = parser . add_argument...
def open_with_encoding(filename, encoding, mode='r'):
    """Return a file object opened with the given text encoding.

    Newline translation is disabled (``newline=''``) so the caller sees
    line endings exactly as stored on disk.
    """
    options = {'mode': mode, 'encoding': encoding, 'newline': ''}
    return io.open(filename, **options)
def _get_result_wrapper(self, query):
    """Get result wrapper class for the cached rows."""
    rows_cursor = RowsCursor(self._rows, self._cursor.description)
    return query._get_cursor_wrapper(rows_cursor)
def fast_median(a):
    """Fast median of a masked array, computed as the 50th percentile."""
    masked = checkma(a)
    if masked.count() == 0:
        # Fully masked input: propagate the masked singleton.
        return np.ma.masked
    return np.percentile(masked.compressed(), 50)
def p_localparamdecl_integer(self, p):
    # NOTE: the docstring below is a PLY grammar rule consumed by the
    # parser generator -- it is behavioral, not documentation.
    'localparamdecl : LOCALPARAM INTEGER param_substitution_list SEMICOLON'
    # Build one Localparam node per (name, value) pair in the substitution
    # list, then wrap them all in a single Decl carrying the rule's lineno.
    paramlist = [Localparam(rname, rvalue, lineno=p.lineno(3))
                 for rname, rvalue in p[3]]
    p[0] = Decl(tuple(paramlist), lineno=p.lineno(1))
    p.set_lineno(0, p.lineno(1))
def key_string_to_lens_path(key_string):
    """Converts a dotted key string into a lens path list.

    ``'foo.bar.0.wopper'`` becomes ``['foo', 'bar', 0, 'wopper']``: purely
    numeric segments (list indices) are converted to ``int``; everything
    else stays a string.

    :param key_string: The dot-separated key string
    :return: The lens path as a list of strings and ints

    Fixes: the original called ``isinstance(int)`` as a one-argument
    predicate (a TypeError with the builtin ``isinstance``) and returned a
    lazy ``map`` object in Python 3 although the docstring promises an
    array.
    """
    def _coerce(segment):
        # Digit-only segments are list indices and become integers.
        return int(segment) if segment.isdigit() else segment

    return [_coerce(segment) for segment in key_string.split('.')]
def query(dataset_key, query, query_type='sql', profile='default',
          parameters=None, **kwargs):
    """Query an existing dataset.

    :param dataset_key: Dataset identifier, in the form of owner/id or of a url
    :type dataset_key: str
    :param query: SQL or SPARQL query
    :type query: str
    :param query_type: Either 'sql' or 'sparql' (default 'sql')
    :param profile: Configuration profile to use (default 'default')
    :param parameters: Optional parameters forwarded with the query
    """
    instance = _get_instance(profile, **kwargs)
    return instance.query(dataset_key, query, query_type=query_type,
                          parameters=parameters, **kwargs)
def update(self):
    """Update the status of the range setting."""
    # Refresh cached vehicle data without waking a sleeping vehicle.
    self._controller.update(self._id, wake_if_asleep=False)
    data = self._controller.get_charging_params(self._id)
    # Only overwrite the cached state when the last manual change is more
    # than 60 seconds old, so a recent user toggle is not clobbered by
    # stale server data.
    if data and (time.time() - self.__manual_update_time > 60):
        self.__maxrange_state = data['charge_to_max_range']
def synchelp ( f ) : '''The synchelp decorator allows the transparent execution of a coroutine using the global loop from a thread other than the event loop . In both use cases , teh actual work is done by the global event loop . Examples : Use as a decorator : : @ s _ glob . synchelp async def stuff ...
def wrap ( * args , ** kwargs ) : coro = f ( * args , ** kwargs ) if not iAmLoop ( ) : return sync ( coro ) return coro return wrap
def augknt ( knots , order ) : """Augment a knot vector . Parameters : knots : Python list or rank - 1 array , the original knot vector ( without endpoint repeats ) order : int , > = 0 , order of spline Returns : list _ of _ knots : rank - 1 array that has ( ` order ` + 1 ) copies of ` ` knots [ 0 ]...
if isinstance ( knots , np . ndarray ) and knots . ndim > 1 : raise ValueError ( "knots must be a list or a rank-1 array" ) knots = list ( knots ) # ensure Python list # One copy of knots [ 0 ] and knots [ - 1 ] will come from " knots " itself , # so we only need to prepend / append " order " copies . return np . a...
def _process_event ( self , data , url , services_incl_filter = None , services_excl_filter = None , custom_tags = None ) : '''Main event processing loop . An event will be created for a service status change . Service checks on the server side can be used to provide the same functionality'''
hostname = data [ 'svname' ] service_name = data [ 'pxname' ] key = "%s:%s" % ( hostname , service_name ) status = self . host_status [ url ] [ key ] custom_tags = [ ] if custom_tags is None else custom_tags if self . _is_service_excl_filtered ( service_name , services_incl_filter , services_excl_filter ) : return ...
def refresh ( self ) : """Reloads the contents for this box based on the parameters . : return < bool >"""
self . setDirty ( False ) self . blockSignals ( True ) self . setUpdatesEnabled ( False ) self . clear ( ) locales = self . _availableLocales if not locales : locales = self . allLocales ( ) if not self . showLanguage ( ) : if self . isTranslated ( ) : sorter = lambda x : x . get_territory_name ( base )...
def setHint(self, hint):
    """Sets the hint for this widget and mirrors it to the detail widget.

    :param hint: <str>
    """
    self._hint = hint
    detail = self.detailWidget()
    detail.setHint(hint)
def get_instruction(self, idx, off=None):
    """Get a particular instruction, by index or (if given) by address.

    :param idx: index of the instruction (the position in the list of
        instructions)
    :type idx: int
    :param off: address of the instruction; when provided it overrides
        ``idx``
    :type off: int
    :return: the instruction at that position

    Fixes: use the identity test ``is not None`` (the idiomatic None
    check) instead of ``!=``, and materialize the instruction iterator
    with ``list()`` instead of a throwaway comprehension.
    """
    if off is not None:
        idx = self.off_to_pos(off)
    return list(self.get_instructions())[idx]
def find_objects ( self , terms = None , type = None , chunksize = None , ** kwargs ) : """Find objects in Fedora . Find query should be generated via keyword args , based on the fields in Fedora documentation . By default , the query uses a contains ( ~ ) search for all search terms . Calls : meth : ` ApiFac...
type = type or self . default_object_type find_opts = { 'chunksize' : chunksize } search_operators = { 'exact' : '=' , 'gt' : '>' , 'gte' : '>=' , 'lt' : '<' , 'lte' : '<=' , 'contains' : '~' } if terms is not None : find_opts [ 'terms' ] = terms else : conditions = [ ] for field , value in six . iteritems ...
def prox_line(xy, step):
    """2D projection onto 2 lines (x-line and y-line, concatenated)."""
    x_part = prox_xline(xy[0], step)
    y_part = prox_yline(xy[1], step)
    return np.concatenate((x_part, y_part))
def rotate(self):
    '''Move the first address to the last position.

    A no-op when the address list is empty (the original ``pop(0)`` would
    raise ``IndexError`` in that case).
    '''
    if not self._address_infos:
        return
    self._address_infos.append(self._address_infos.pop(0))
def rmq_ssl_is_enabled_on_unit ( self , sentry_unit , port = None ) : """Check a single juju rmq unit for ssl and port in the config file ."""
host = sentry_unit . info [ 'public-address' ] unit_name = sentry_unit . info [ 'unit_name' ] conf_file = '/etc/rabbitmq/rabbitmq.config' conf_contents = str ( self . file_contents_safe ( sentry_unit , conf_file , max_wait = 16 ) ) # Checks conf_ssl = 'ssl' in conf_contents conf_port = str ( port ) in conf_contents # P...
def lookup ( name , min_similarity_ratio = .75 ) : """Look up for a Stan function with similar functionality to a Python function ( or even an R function , see examples ) . If the function is not present on the lookup table , then attempts to find similar one and prints the results . This function requires pa...
if lookuptable is None : build ( ) if name not in lookuptable . keys ( ) : from difflib import SequenceMatcher from operator import itemgetter print ( "No match for " + name + " in the lookup table." ) lkt_keys = list ( lookuptable . keys ( ) ) mapfunction = lambda x : SequenceMatcher ( a = name...
def parse_arguments ( ) : """Collect command - line arguments . Let the caller run parse _ args ( ) , as sphinx - argparse requires a function that returns an instance of argparse . ArgumentParser"""
# Pull a few settings from the environment , should they exist base_dn = os . environ [ 'PUDL_BASE_DN' ] if 'PUDL_BASE_DN' in os . environ else 'OU=Departments,DC=example,DC=com' domain = os . environ [ 'PUDL_DOMAIN' ] . upper ( ) if 'PUDL_DOMAIN' in os . environ else 'EXAMPLE' page_size = os . environ [ 'PUDL_PAGE_SIZ...
def resume(self, container_id=None, sudo=None):
    '''Resume a stopped OciImage container, if it exists.

    Equivalent command line example:
        singularity oci resume <container_ID>

    Parameters
    ==========
    container_id: the id to resume.
    sudo: Add sudo to the command. Needed if the container was created
        by root.
    '''
    return self._state_command(container_id, command='resume', sudo=sudo)
def get_nn_info ( self , structure , n ) : """Get all near - neighbor information . Args : structure : ( Structure ) pymatgen Structure n : ( int ) index of target site Returns : siw ( list of dicts ) : each dictionary provides information about a single near neighbor , where key ' site ' gives access...
nndata = self . get_nn_data ( structure , n ) if not self . weighted_cn : max_key = max ( nndata . cn_weights , key = lambda k : nndata . cn_weights [ k ] ) nn = nndata . cn_nninfo [ max_key ] for entry in nn : entry [ "weight" ] = 1 return nn else : for entry in nndata . all_nninfo : ...
def _attachment_uri ( self , attachid ) : """Returns the URI for the given attachment ID ."""
att_uri = self . url . replace ( 'xmlrpc.cgi' , 'attachment.cgi' ) att_uri = att_uri + '?id=%s' % attachid return att_uri
def calc_one_vert_gauss(one_vert, xyz=None, std=None):
    """Calculate how much each electrode influences one vertex, using a
    Gaussian of the vertex-to-channel distance.

    Parameters
    ----------
    one_vert : ndarray
        vector of xyz position of a vertex
    xyz : ndarray
        nChan X 3 with the position of all the channels
    std : float
        standard deviation of the Gaussian
    """
    weights = empty(xyz.shape[0])
    for idx, chan_pos in enumerate(xyz):
        weights[idx] = gauss(norm(one_vert - chan_pos), std)
    return weights
def setup_timezone ( timezone : str ) -> None : """Shortcut helper to configure timezone for backend application . : param timezone : Timezone to use , e . g . " UTC " , " Europe / Kiev " ."""
if timezone and hasattr ( time , 'tzset' ) : tz_root = '/usr/share/zoneinfo' tz_filename = os . path . join ( tz_root , * ( timezone . split ( '/' ) ) ) if os . path . exists ( tz_root ) and not os . path . exists ( tz_filename ) : raise ValueError ( 'Incorrect timezone value: {0}' . format ( timezo...
def Collect ( self , knowledge_base , artifact_definition , searcher ) : """Collects values using a Windows Registry value artifact definition . Args : knowledge _ base ( KnowledgeBase ) : to fill with preprocessing information . artifact _ definition ( artifacts . ArtifactDefinition ) : artifact definition ....
for source in artifact_definition . sources : if source . type_indicator not in ( artifact_definitions . TYPE_INDICATOR_WINDOWS_REGISTRY_KEY , artifact_definitions . TYPE_INDICATOR_WINDOWS_REGISTRY_VALUE ) : continue if source . type_indicator == ( artifact_definitions . TYPE_INDICATOR_WINDOWS_REGISTRY_...
def _execute ( self , parts , expectation = None , format_callback = None ) : """Really execute a redis command : param list parts : The list of command parts : param mixed expectation : Optional response expectation : rtype : : class : ` ~ tornado . concurrent . Future ` : raises : : exc : ` ~ tredis . exc...
future = concurrent . TracebackFuture ( ) try : command = self . _build_command ( parts ) except ValueError as error : future . set_exception ( error ) return future def on_locked ( _ ) : if self . ready : if self . _clustering : cmd = Command ( command , self . _pick_cluster_host ( ...
def getopenfilenames ( parent = None , caption = '' , basedir = '' , filters = '' , selectedfilter = '' , options = None ) : """Wrapper around QtGui . QFileDialog . getOpenFileNames static method Returns a tuple ( filenames , selectedfilter ) - - when dialog box is canceled , returns a tuple ( empty list , empt...
return _qfiledialog_wrapper ( 'getOpenFileNames' , parent = parent , caption = caption , basedir = basedir , filters = filters , selectedfilter = selectedfilter , options = options )
def find_cross_contamination ( databases , pair , tmpdir = 'tmp' , log = 'log.txt' , threads = 1 ) : """Usese mash to find out whether or not a sample has more than one genus present , indicating cross - contamination . : param databases : A databases folder , which must contain refseq . msh , a mash sketch that ...
genera_present = list ( ) out , err , cmd = mash . screen ( '{}/refseq.msh' . format ( databases ) , pair [ 0 ] , pair [ 1 ] , threads = threads , w = '' , i = '0.95' , output_file = os . path . join ( tmpdir , 'screen.tab' ) , returncmd = True ) write_to_logfile ( log , out , err , cmd ) screen_output = mash . read_ma...
def OnBorderChoice(self, event):
    """Change the borders that are affected by color and width changes"""
    items = event.GetEventObject().GetItems()
    selection = event.GetInt()
    self.borderstate = items[selection]
def _depth_limited_walk(top, max_depth=None):
    '''Walk the directory tree under ``top``, descending at most
    ``max_depth`` levels (no limit when ``max_depth`` is None).'''
    for root, dirs, files in salt.utils.path.os_walk(top):
        if max_depth is not None:
            # Depth relative to `top`, measured in path separators.
            relative_depth = root.count(os.path.sep) - top.count(os.path.sep)
            if relative_depth >= max_depth:
                # Emptying `dirs` in place stops os_walk from descending.
                del dirs[:]
        yield (six.text_type(root), list(dirs), list(files))
def load_delimited ( filename , converters , delimiter = r'\s+' ) : r"""Utility function for loading in data from an annotation file where columns are delimited . The number of columns is inferred from the length of the provided converters list . Examples > > > # Load in a one - column list of event times (...
# Initialize list of empty lists n_columns = len ( converters ) columns = tuple ( list ( ) for _ in range ( n_columns ) ) # Create re object for splitting lines splitter = re . compile ( delimiter ) # Note : we do io manually here for two reasons . # 1 . The csv module has difficulties with unicode , which may lead # t...
def local_port_range(self):
    """Tuple of (low_port, high_port) reflecting the local port range
    assigned to outbound connections.

    Read once from procfs and cached; used as part of a heuristic to
    decide whether a connection is inbound or outbound.
    """
    if self._local_port_range is None:
        with open('/proc/sys/net/ipv4/ip_local_port_range', 'r') as f:
            fields = f.read().split('\t')
        self._local_port_range = tuple(int(field) for field in fields)
    return self._local_port_range
def load_data ( handle , reader = None ) : '''Unpack data into a raw data wrapper'''
if not reader : reader = os . path . splitext ( handle ) [ 1 ] [ 1 : ] . lower ( ) if reader not in _READERS : raise NeuroMError ( 'Do not have a loader for "%s" extension' % reader ) filename = _get_file ( handle ) try : return _READERS [ reader ] ( filename ) except Exception as e : L . exception ( 'E...
def dump ( node ) : """Dump initialized object structure to yaml"""
from qubell . api . private . platform import Auth , QubellPlatform from qubell . api . private . organization import Organization from qubell . api . private . application import Application from qubell . api . private . instance import Instance from qubell . api . private . revision import Revision from qubell . api ...
def _addLink ( self , dirTree , dirID , dirSeq , dirPath , name ) : """Add tree reference and name . ( Hardlink ) ."""
logger . debug ( "Link %d-%d-%d '%s%s'" , dirTree , dirID , dirSeq , dirPath , name ) # assert dirTree ! = 0 , ( dirTree , dirID , dirSeq , dirPath , name ) assert ( dirTree , dirID , dirSeq ) not in self . links , ( dirTree , dirID , dirSeq ) self . links [ ( dirTree , dirID , dirSeq ) ] = ( dirPath , name ) assert l...
def show_sbridges ( self ) : """Visualize salt bridges ."""
for i , saltb in enumerate ( self . plcomplex . saltbridges ) : if saltb . protispos : for patom in saltb . positive_atoms : cmd . select ( 'PosCharge-P' , 'PosCharge-P or (id %i & %s)' % ( patom , self . protname ) ) for latom in saltb . negative_atoms : cmd . select ( 'NegC...
def status_log(func, message, *args, **kwargs):
    """Emits header message, executes a callable, and echoes the return strings."""
    click.echo(message)
    log = func(*args, **kwargs)
    if log:
        # Drop commented lines before echoing.
        kept = [line for line in log.split('\n') if not line.startswith('#')]
        click.echo(black('\n'.join(kept)))
def setup_local_geometry(self, isite, coords, optimization=None):
    """Sets up the AbstractGeometry for the local geometry of the site with
    index ``isite``.

    :param isite: Index of the site for which the local geometry has to be set up
    :param coords: The coordinates of the (local) neighbors
    :param optimization: Optional optimization flag forwarded to AbstractGeometry
    """
    central = self.structure.cart_coords[isite]
    self.local_geometry = AbstractGeometry(
        central_site=central,
        bare_coords=coords,
        centering_type=self.centering_type,
        include_central_site_in_centroid=self.include_central_site_in_centroid,
        optimization=optimization,
    )
def epsilon_crit ( self ) : """returns the critical projected mass density in units of M _ sun / Mpc ^ 2 ( physical units ) : return : critical projected mass density"""
if not hasattr ( self , '_Epsilon_Crit' ) : const_SI = const . c ** 2 / ( 4 * np . pi * const . G ) # c ^ 2 / ( 4 * pi * G ) in units of [ kg / m ] conversion = const . Mpc / const . M_sun # converts [ kg / m ] to [ M _ sun / Mpc ] factor = const_SI * conversion # c ^ 2 / ( 4 * pi * G ) in units...
def _binary_insert ( lst , elem , key , lo = 0 , hi = None ) : """Insert an element into a sorted list , and keep the list sorted . The major difference from bisect . bisect _ left is that this function supports a key method , so user doesn ' t have to create the key array for each insertion . : param list ls...
if lo < 0 : raise ValueError ( "lo must be a non-negative number" ) if hi is None : hi = len ( lst ) while lo < hi : mid = ( lo + hi ) // 2 if key ( lst [ mid ] ) < key ( elem ) : lo = mid + 1 else : hi = mid lst . insert ( lo , elem )
def size(self):
    """Bytes uploaded of the file so far.

    Note that we only have the file size if the file was requested
    directly, not if it's part of a folder listing; returns ``None`` in
    the latter case.
    """
    revision = self.f.latestRevision
    if not hasattr(revision, 'size'):
        return None
    return int(revision.size)
def get_version():
    """Reads the version (MAJOR.MINOR) from this module.

    A three-part release string is trimmed to its first two components;
    anything else is returned unchanged.
    """
    release = get_release()
    parts = release.split(".")
    if len(parts) == 3:
        return ".".join(parts[:2])
    return release
def set_split_extents_by_split_size ( self ) : """Sets split shape : attr : ` split _ shape ` and split extents ( : attr : ` split _ begs ` and : attr : ` split _ ends ` ) from values in : attr : ` split _ size ` and : attr : ` split _ num _ slices _ per _ axis ` ."""
if self . split_size is None : if ( _np . all ( [ s is not None for s in self . split_num_slices_per_axis ] ) and _np . all ( [ s > 0 for s in self . split_num_slices_per_axis ] ) ) : self . split_size = _np . product ( self . split_num_slices_per_axis ) else : raise ValueError ( ( "Got invalid ...
def iter_packages(self):
    """Iterate over the packages within this family, in no particular order.

    Returns:
        `Package` iterator.
    """
    for resource_package in self.repository.iter_packages(self.resource):
        yield Package(resource_package)
def set_include_entities(self, include):
    """Sets 'include entities' parameter to either include or exclude
    the entities node within the results.

    :param include: Boolean to trigger the 'include entities' parameter
    :raises: TwitterSearchException if ``include`` is not a bool
    """
    if isinstance(include, bool):
        flag = 'true' if include else 'false'
        self.arguments.update({'include_entities': flag})
    else:
        # Error code 1008: invalid boolean argument.
        raise TwitterSearchException(1008)
def parse ( readDataInstance ) : """Returns a new L { NetMetaDataHeader } object . @ type readDataInstance : L { ReadData } @ param readDataInstance : A L { ReadData } object with data to be parsed as a L { NetMetaDataHeader } object . @ rtype : L { NetMetaDataHeader } @ return : A new L { NetMetaDataHeader...
nmh = NetMetaDataHeader ( ) nmh . signature . value = readDataInstance . readDword ( ) nmh . majorVersion . value = readDataInstance . readWord ( ) nmh . minorVersion . value = readDataInstance . readWord ( ) nmh . reserved . value = readDataInstance . readDword ( ) nmh . versionLength . value = readDataInstance . read...
def record_rename(object_id, input_params={}, always_retry=True, **kwargs):
    """Invokes the /record-xxxx/rename API method.

    For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Name#API-method%3A-%2Fclass-xxxx%2Frename
    """
    # Build the per-object endpoint path, then delegate to the shared
    # HTTP request helper.
    endpoint = '/%s/rename' % object_id
    return DXHTTPRequest(endpoint, input_params,
                         always_retry=always_retry, **kwargs)
def set_dependent_orders ( self , accountID , tradeSpecifier , ** kwargs ) : """Create , replace and cancel a Trade ' s dependent Orders ( Take Profit , Stop Loss and Trailing Stop Loss ) through the Trade itself Args : accountID : Account Identifier tradeSpecifier : Specifier for the Trade takeProfit...
request = Request ( 'PUT' , '/v3/accounts/{accountID}/trades/{tradeSpecifier}/orders' ) request . set_path_param ( 'accountID' , accountID ) request . set_path_param ( 'tradeSpecifier' , tradeSpecifier ) body = EntityDict ( ) if 'takeProfit' in kwargs : body . set ( 'takeProfit' , kwargs [ 'takeProfit' ] ) if 'stop...
def initialize(cls) -> None:
    """Initializes the ``SIGCHLD`` handler.

    The signal handler is run on an `.IOLoop` to avoid locking issues.
    Note that the `.IOLoop` used for signal handling need not be the same
    one used by individual Subprocess objects (as long as the ``IOLoops``
    are each running in their own thread).
    """
    if cls._initialized:
        return
    loop = ioloop.IOLoop.current()

    def _on_sigchld(sig, frame):
        # Defer the actual cleanup onto the IOLoop; only signal-safe
        # work may happen inside the handler itself.
        loop.add_callback_from_signal(cls._cleanup)

    # Keep the previous handler so it can be restored later.
    cls._old_sigchld = signal.signal(signal.SIGCHLD, _on_sigchld)
    cls._initialized = True
def _handle_tag_salt_error ( self , tag , data ) : '''Handle a _ salt _ error event'''
if self . connected : log . debug ( 'Forwarding salt error event tag=%s' , tag ) self . _fire_master ( data , tag )
def new ( self ) : # type : ( ) - > None '''A method to create a new UDF Primary Volume Descriptor . Parameters : None . Returns : Nothing .'''
if self . _initialized : raise pycdlibexception . PyCdlibInternalError ( 'UDF Primary Volume Descriptor already initialized' ) self . desc_tag = UDFTag ( ) self . desc_tag . new ( 1 ) # FIXME : we should let the user set serial _ number self . vol_desc_seqnum = 0 # FIXME : we should let the user set this self . des...
def fit_angle(fit1, fit2, degrees=True):
    """Finds the angle between the nominal vectors of two fits.

    Parameters
    ----------
    fit1, fit2 : objects exposing a ``normal`` attribute (the nominal vector)
    degrees : bool, optional
        If True (default), return the angle in degrees; otherwise return
        it in radians.
    """
    result = angle(fit1.normal, fit2.normal)
    # Bug fix: the `degrees` flag was previously ignored and the result
    # was unconditionally converted to degrees.
    return N.degrees(result) if degrees else result
def obtain_all_bond_lengths ( sp1 , sp2 , default_bl = None ) : """Obtain bond lengths for all bond orders from bond length database Args : sp1 ( Specie ) : First specie . sp2 ( Specie ) : Second specie . default _ bl : If a particular type of bond does not exist , use this bond length as a default value ...
if isinstance ( sp1 , Element ) : sp1 = sp1 . symbol if isinstance ( sp2 , Element ) : sp2 = sp2 . symbol syms = tuple ( sorted ( [ sp1 , sp2 ] ) ) if syms in bond_lengths : return bond_lengths [ syms ] . copy ( ) elif default_bl is not None : return { 1 : default_bl } else : raise ValueError ( "No ...
def step ( self , action ) : """Apply sequence of actions to sequence of environments actions - > ( observations , rewards , news ) where ' news ' is a boolean vector indicating whether each element is new ."""
obs , rews , news , infos = self . env . step ( action ) self . ret = self . ret * self . gamma + rews obs = self . _filter_observation ( obs ) if self . ret_rms : self . ret_rms . update ( np . array ( [ self . ret ] ) ) rews = np . clip ( rews / np . sqrt ( self . ret_rms . var + self . epsilon ) , - self . c...
def infer_call_result ( self , caller , context = None ) : """infer what a class is returning when called"""
if ( self . is_subtype_of ( "%s.type" % ( BUILTINS , ) , context ) and len ( caller . args ) == 3 ) : result = self . _infer_type_call ( caller , context ) yield result return dunder_call = None try : metaclass = self . metaclass ( context = context ) if metaclass is not None : dunder_call =...
def run_reports(reportlets_dir, out_dir, subject_label, run_uuid,
                config=None, packagename=None):
    """Runs the reports.

    Builds a ``Report`` for the given subject/run and returns the result
    of its ``generate_report()`` call.
    """
    report_obj = Report(
        Path(reportlets_dir),
        out_dir,
        run_uuid,
        config=config,
        subject_id=subject_label,
        packagename=packagename,
    )
    return report_obj.generate_report()
def show_messages(self):
    """Show all messages.

    Concatenates header, optional static message, dynamic messages and
    footer into one HTML string, prints it and renders it via setHtml().
    """
    parts = [self.header]
    if self.static_message is not None:
        parts.append(self.static_message.to_html())
    parts.extend(msg.to_html() for msg in self.dynamic_messages)
    parts.append(self.footer)
    html = ''.join(parts)
    print(html)
    self.setHtml(html)
def morphological_chan_vese ( image , iterations , init_level_set = 'checkerboard' , smoothing = 1 , lambda1 = 1 , lambda2 = 1 , iter_callback = lambda x : None ) : """Morphological Active Contours without Edges ( MorphACWE ) Active contours without edges implemented with morphological operators . It can be use...
init_level_set = _init_level_set ( init_level_set , image . shape ) _check_input ( image , init_level_set ) u = np . int8 ( init_level_set > 0 ) iter_callback ( u ) for _ in range ( iterations ) : # inside = u > 0 # outside = u < = 0 c0 = ( image * ( 1 - u ) ) . sum ( ) / float ( ( 1 - u ) . sum ( ) + 1e-8 ) c1...
def cas ( self , key , value , cas , time , compress_level = - 1 ) : """Add a key / value to server ony if it does not exist . : param key : Key ' s name : type key : six . string _ types : param value : A value to be stored on server . : type value : object : param time : Time in seconds that your key wi...
# The protocol CAS value 0 means " no cas " . Calling cas ( ) with that value is # probably unintentional . Don ' t allow it , since it would overwrite the value # without performing CAS at all . assert cas != 0 , '0 is an invalid CAS value' # If we get a cas of None , interpret that as " compare against nonexistant an...
def max_drawdown ( returns = None , geometric = True , dd = None , inc_date = False ) : """compute the max draw down . returns : period return Series or DataFrame dd : drawdown Series or DataFrame ( mutually exclusive with returns )"""
if ( returns is None and dd is None ) or ( returns is not None and dd is not None ) : raise ValueError ( 'returns and drawdowns are mutually exclusive' ) if returns is not None : dd = drawdowns ( returns , geometric = geometric ) if isinstance ( dd , pd . DataFrame ) : vals = [ max_drawdown ( dd = dd [ c ] ...
def get_workflow_status ( self , depth = 2 ) : '''Gets the workflow status . Parameters depth : int , optional query depth - in which detail status of subtasks will be queried Returns dict status information about the workflow See also : func : ` tmserver . api . workflow . get _ workflow _ status `...
logger . info ( 'get status for workflow of experiment "%s"' , self . experiment_name ) params = { 'depth' : depth } url = self . _build_api_url ( '/experiments/{experiment_id}/workflow/status' . format ( experiment_id = self . _experiment_id ) , params ) res = self . _session . get ( url ) res . raise_for_status ( ) r...
def head(self, route: str, callback: object):
    """Binds a HEAD route with the given callback.

    :param route: URL pattern to bind
    :param callback: handler to invoke for matching HEAD requests
    :rtype: object
    """
    # Annotation fix: the originals were instances (``str()``/``object()``)
    # used as annotations rather than the types themselves.
    self.__set_route('head', {route: callback})
    return RouteMapping
def accept ( self , evt ) : """write setting to the preferences"""
# determine if application is a script file or frozen exe ( pyinstaller ) frozen = getattr ( sys , 'frozen' , False ) if frozen : app_file = sys . executable else : app_file = PathStr ( __main__ . __file__ ) . abspath ( ) if self . cb_startmenu . isChecked ( ) : # TODO : allow only logo location # icon = app _ ...
def publish ( self , user_id , wifi_fingerprint , action = 'track' , location_id = '' , port = 1883 ) : '''a method to publish wifi fingerprint data to a mosquitto server : param user _ id : string with id of user : param wifi _ fingerprint : list of dictionaries with wifi fields mac and rssi : param action :...
title = '%s.publish' % self . __class__ . __name__ # validate inputs input_fields = { 'user_id' : user_id , 'wifi_fingerprint' : wifi_fingerprint , 'action' : action , 'location_id' : location_id , 'port' : port } for key , value in input_fields . items ( ) : object_title = '%s(%s=%s)' % ( title , key , str ( value...
def hybrid_forward(self, F, words, weight):
    """Compute embedding of words in batch.

    Parameters
    ----------
    words : mx.nd.NDArray
        Array of token indices.
    """
    # pylint: disable=arguments-differ
    # Sparse dot-product lookup of the embedding rows for `words`.
    return F.sparse.dot(words, weight)
def get_model_creation_kwargs ( model_obj ) : """Get a dictionary of the keyword arguments needed to create the passed model object using ` pylogit . create _ choice _ model ` . Parameters model _ obj : An MNDC _ Model instance . Returns model _ kwargs : dict . Contains the keyword arguments and the req...
# Extract the model abbreviation for this model model_abbrev = get_model_abbrev ( model_obj ) # Create a dictionary to store the keyword arguments needed to Initialize # the new model object . d model_kwargs = { "model_type" : model_abbrev , "names" : model_obj . name_spec , "intercept_names" : model_obj . intercept_na...
def normalized_start(self):
    """Returns a NamespaceRange with leading non-existant namespaces removed.

    Returns:
        A copy of this NamespaceRange whose namespace_start is adjusted to
        exclude the portion of the range that contains no actual namespaces
        in the datastore. None is returned if the range is empty.
    """
    # Probe the datastore for the first namespace at or after the start.
    query_results = list(self.make_datastore_query().Run(limit=1))
    if query_results:
        first_name = query_results[0].name() or ''
        return NamespaceRange(first_name, self.namespace_end, _app=self.app)
    return None
def _generate_features ( self , feature_extractors ) : """Run all FeatureExtractors and record results in a key - value format . : param feature _ extractors : iterable of ` FeatureExtractor ` objects ."""
results = [ pd . DataFrame ( ) ] n_ext = len ( feature_extractors ) for i , extractor in enumerate ( feature_extractors ) : log . info ( "generating: '%s' (%d/%d)" , extractor . name , i + 1 , n_ext ) cached_extractor = self . _cache [ extractor . name ] if extractor . same ( cached_extractor ) : lo...
def find_coord_vars(ncds):
    """Finds all coordinate variables in a dataset.

    A variable with the same name as a dimension is called a coordinate
    variable (and must be one-dimensional over that same dimension).
    """
    return [
        ncds.variables[dim]
        for dim in ncds.dimensions
        if dim in ncds.variables and ncds.variables[dim].dimensions == (dim,)
    ]
def link_to_sibling(self, feed, sibling_type, atom_feed):
    """Adding previous or next archive links to the given feed.

    Usage::

        self._link_to_sibling(feed, 'previous', atom_feed)
        self._link_to_sibling(feed, 'next', atom_feed)

    :param feed: a feed object
    :param sibling_type: 'previous' or 'next'
    :param atom_feed: the atom feed being built
    """
    sibling = self.atom_feed_manager.get_sibling(feed.id, sibling_type)
    if not sibling:
        return
    if sibling_type == "previous":
        rel = "prev-archive"
    else:
        rel = "next-archive"
    href = self.request.route_url(self.get_atom_feed_url, id=sibling.id)
    atom_feed.link(href=href, rel=rel)